From 8c194cac8d9538657443c7304525d922925b9e80 Mon Sep 17 00:00:00 2001 From: RulaKhaled Date: Wed, 27 Aug 2025 17:25:44 +0200 Subject: [PATCH 1/4] feat(core): Instrument tool calls for Anthropic AI --- .../anthropic/scenario-stream-tools.mjs | 110 +++++++++++++++++ .../tracing/anthropic/scenario-tools.mjs | 66 ++++++++++ .../suites/tracing/anthropic/test.ts | 55 +++++++++ .../core/src/utils/anthropic-ai/constants.ts | 1 + packages/core/src/utils/anthropic-ai/index.ts | 21 +++- .../core/src/utils/anthropic-ai/streaming.ts | 115 ++++++++++++++++-- packages/core/src/utils/anthropic-ai/types.ts | 28 ++++- 7 files changed, 380 insertions(+), 16 deletions(-) create mode 100644 dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs create mode 100644 dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs new file mode 100644 index 000000000000..8fec227cde06 --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs @@ -0,0 +1,110 @@ +import { instrumentAnthropicAiClient } from '@sentry/core'; +import * as Sentry from '@sentry/node'; + +function createMockStreamEvents(model = 'claude-3-haiku-20240307') { + async function* generator() { + // initial message metadata with id/model and input tokens + yield { + type: 'content_block_start', + message: { + id: 'msg_stream_tool_1', + type: 'message', + role: 'assistant', + model, + content: [], + stop_reason: 'end_turn', + usage: { input_tokens: 11 }, + }, + }; + + // streamed text + yield { type: 'content_block_delta', delta: { text: 'Starting tool...' } }; + + // tool_use streamed via partial json + yield { type: 'content_block_start', index: 0, content_block: { type: 'tool_use', id: 'tool_weather_2', name: 'weather' } }; + yield { type: 'content_block_delta', index: 0, delta: { partial_json: '{"city":' } }; + yield { type: 'content_block_delta', index: 0, delta: { partial_json: '"Paris"}' } }; + yield { type: 'content_block_stop', index: 0 }; + + // more text + yield { type: 'content_block_delta', delta: { text: 'Done.' 
} }; + + // final usage + yield { type: 'message_delta', usage: { output_tokens: 9 } }; + } + return generator(); +} + +class MockAnthropic { + constructor(config) { + this.apiKey = config.apiKey; + this.messages = { + create: this._messagesCreate.bind(this), + stream: this._messagesStream.bind(this), + }; + } + + async _messagesCreate(params) { + await new Promise(resolve => setTimeout(resolve, 5)); + if (params?.stream) { + return createMockStreamEvents(params.model); + } + return { + id: 'msg_mock_no_stream', + type: 'message', + model: params.model, + role: 'assistant', + content: [{ type: 'text', text: 'No stream' }], + usage: { input_tokens: 2, output_tokens: 3 }, + }; + } + + async _messagesStream(params) { + await new Promise(resolve => setTimeout(resolve, 5)); + return createMockStreamEvents(params?.model); + } +} + +async function run() { + await Sentry.startSpan({ op: 'function', name: 'main' }, async () => { + const mockClient = new MockAnthropic({ apiKey: 'mock-api-key' }); + const client = instrumentAnthropicAiClient(mockClient); + + // stream via create(stream:true) + const stream1 = await client.messages.create({ + model: 'claude-3-haiku-20240307', + messages: [{ role: 'user', content: 'Need the weather' }], + tools: [ + { + name: 'weather', + description: 'Get weather', + input_schema: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] }, + }, + ], + stream: true, + }); + for await (const _ of stream1) { + void _; + } + + // stream via messages.stream + const stream2 = await client.messages.stream({ + model: 'claude-3-haiku-20240307', + messages: [{ role: 'user', content: 'Need the weather' }], + tools: [ + { + name: 'weather', + description: 'Get weather', + input_schema: { type: 'object', properties: { city: { type: 'string' } }, required: ['city'] }, + }, + ], + }); + for await (const _ of stream2) { + void _; + } + }); +} + +run(); + + diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs new file mode 100644 index 000000000000..540ed95bfc0c --- /dev/null +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs @@ -0,0 +1,66 @@ +import { instrumentAnthropicAiClient } from '@sentry/core'; +import * as Sentry from '@sentry/node'; + +class MockAnthropic { + constructor(config) { + this.apiKey = config.apiKey; + + this.messages = { + create: this._messagesCreate.bind(this), + }; + } + + async _messagesCreate(params) { + await new Promise(resolve => setTimeout(resolve, 5)); + + return { + id: 'msg_mock_tool_1', + type: 'message', + model: params.model, + role: 'assistant', + content: [ + { type: 'text', text: 'Let me check the weather.' }, + { + type: 'tool_use', + id: 'tool_weather_1', + name: 'weather', + input: { city: 'Paris' }, + }, + { type: 'text', text: 'It is sunny.' }, + ], + stop_reason: 'end_turn', + stop_sequence: null, + usage: { + input_tokens: 5, + output_tokens: 7, + }, + }; + } +} + +async function run() { + await Sentry.startSpan({ op: 'function', name: 'main' }, async () => { + const mockClient = new MockAnthropic({ apiKey: 'mock-api-key' }); + const client = instrumentAnthropicAiClient(mockClient); + + await client.messages.create({ + model: 'claude-3-haiku-20240307', + messages: [{ role: 'user', content: 'What is the weather?' 
}], + tools: [ + { + name: 'weather', + description: 'Get the weather by city', + input_schema: { + type: 'object', + properties: { city: { type: 'string' } }, + required: ['city'], + }, + }, + ], + }); + }); +} + +run(); + + diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts b/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts index 9b8c7219000d..35252f574003 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts @@ -293,4 +293,59 @@ describe('Anthropic integration', () => { await createRunner().ignore('event').expect({ transaction: EXPECTED_STREAM_SPANS_PII_TRUE }).start().completed(); }); }); + + // Non-streaming tool calls + available tools (PII true) + createEsmAndCjsTests(__dirname, 'scenario-tools.mjs', 'instrument-with-pii.mjs', (createRunner, test) => { + test('non-streaming sets available tools and tool calls with PII', async () => { + const EXPECTED_TOOLS_JSON = + '[{"name":"weather","description":"Get the weather by city","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}]'; + const EXPECTED_TOOL_CALLS_JSON = + '[{"type":"tool_use","id":"tool_weather_1","name":"weather","input":{"city":"Paris"}}]'; + await createRunner() + .ignore('event') + .expect({ + transaction: { + spans: expect.arrayContaining([ + expect.objectContaining({ + op: 'gen_ai.messages', + data: expect.objectContaining({ + 'gen_ai.request.available_tools': EXPECTED_TOOLS_JSON, + 'gen_ai.response.tool_calls': EXPECTED_TOOL_CALLS_JSON, + }), + }), + ]), + }, + }) + .start() + .completed(); + }); + }); + + // Streaming tool calls + available tools (PII true) + createEsmAndCjsTests(__dirname, 'scenario-stream-tools.mjs', 'instrument-with-pii.mjs', (createRunner, test) => { + test('streaming sets available tools and tool calls with PII', async () => { + const EXPECTED_TOOLS_JSON = + '[{"name":"weather","description":"Get weather","input_schema":{"type":"object","properties":{"city":{"type":"string"}},"required":["city"]}}]'; + const EXPECTED_TOOL_CALLS_JSON = + '[{"type":"tool_use","id":"tool_weather_2","name":"weather","input":{"city":"Paris"}}]'; + await createRunner() + .ignore('event') + .expect({ + transaction: { + spans: expect.arrayContaining([ + expect.objectContaining({ + description: expect.stringContaining('stream-response'), + op: 'gen_ai.messages', + data: expect.objectContaining({ + 'gen_ai.request.available_tools': EXPECTED_TOOLS_JSON, + 'gen_ai.response.tool_calls': EXPECTED_TOOL_CALLS_JSON, + }), + }), + ]), + }, + }) + .start() + .completed(); + }); + }); }); diff --git a/packages/core/src/utils/anthropic-ai/constants.ts b/packages/core/src/utils/anthropic-ai/constants.ts index 1e20745e0f1f..7e6c66196a82 100644 --- a/packages/core/src/utils/anthropic-ai/constants.ts +++ b/packages/core/src/utils/anthropic-ai/constants.ts @@ -9,4 +9,5 @@ export const ANTHROPIC_AI_INSTRUMENTED_METHODS = [ 'models.get', 'completions.create', 'models.retrieve', + 'beta.messages.create', ] as const; diff --git a/packages/core/src/utils/anthropic-ai/index.ts b/packages/core/src/utils/anthropic-ai/index.ts index 2ed95be76843..c54fdc2a8a9c 100644 --- a/packages/core/src/utils/anthropic-ai/index.ts +++ b/packages/core/src/utils/anthropic-ai/index.ts @@ -8,6 +8,7 @@ import { ANTHROPIC_AI_RESPONSE_TIMESTAMP_ATTRIBUTE, GEN_AI_OPERATION_NAME_ATTRIBUTE, GEN_AI_PROMPT_ATTRIBUTE, + GEN_AI_REQUEST_AVAILABLE_TOOLS_ATTRIBUTE, 
   GEN_AI_REQUEST_FREQUENCY_PENALTY_ATTRIBUTE,
   GEN_AI_REQUEST_MAX_TOKENS_ATTRIBUTE,
   GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
@@ -19,6 +20,7 @@ import {
   GEN_AI_RESPONSE_ID_ATTRIBUTE,
   GEN_AI_RESPONSE_MODEL_ATTRIBUTE,
   GEN_AI_RESPONSE_TEXT_ATTRIBUTE,
+  GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE,
   GEN_AI_SYSTEM_ATTRIBUTE,
 } from '../ai/gen-ai-attributes';
 import { buildMethodPath, getFinalOperationName, getSpanOperation, setTokenUsageAttributes } from '../ai/utils';
@@ -31,6 +33,7 @@ import type {
   AnthropicAiOptions,
   AnthropicAiResponse,
   AnthropicAiStreamingEvent,
+  ContentBlock,
 } from './types';
 import { shouldInstrument } from './utils';

@@ -46,6 +49,9 @@ function extractRequestAttributes(args: unknown[], methodPath: string): Record<s
   if (args.length > 0 && typeof args[0] === 'object' && args[0] !== null) {
     const params = args[0] as Record<string, unknown>;
+    if (params.tools && Array.isArray(params.tools)) {
+      attributes[GEN_AI_REQUEST_AVAILABLE_TOOLS_ATTRIBUTE] = JSON.stringify(params.tools);
+    }

     attributes[GEN_AI_REQUEST_MODEL_ATTRIBUTE] = params.model ?? 'unknown';

     if ('temperature' in params) attributes[GEN_AI_REQUEST_TEMPERATURE_ATTRIBUTE] = params.temperature;
@@ -96,10 +102,21 @@ function addResponseAttributes(span: Span, response: AnthropicAiResponse, record
     if (Array.isArray(response.content)) {
       span.setAttributes({
         [GEN_AI_RESPONSE_TEXT_ATTRIBUTE]: response.content
-          .map((item: { text: string | undefined }) => item.text)
-          .filter((text): text is string => text !== undefined)
+          .map((item: ContentBlock) => item.text)
+          .filter(text => !!text)
           .join(''),
       });
+
+      const toolCalls: Array<ContentBlock> = [];
+
+      for (const item of response.content) {
+        if (item.type === 'tool_use' || item.type === 'server_tool_use') {
+          toolCalls.push(item);
+        }
+      }
+      if (toolCalls.length > 0) {
+        span.setAttributes({ [GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE]: JSON.stringify(toolCalls) });
+      }
     }
   }
   // Completions.create
diff --git a/packages/core/src/utils/anthropic-ai/streaming.ts b/packages/core/src/utils/anthropic-ai/streaming.ts
index 8ebbfc0b42cd..0cecd2aca573 100644
--- a/packages/core/src/utils/anthropic-ai/streaming.ts
+++ b/packages/core/src/utils/anthropic-ai/streaming.ts
@@ -7,6 +7,7 @@ import {
   GEN_AI_RESPONSE_MODEL_ATTRIBUTE,
   GEN_AI_RESPONSE_STREAMING_ATTRIBUTE,
   GEN_AI_RESPONSE_TEXT_ATTRIBUTE,
+  GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE,
 } from '../ai/gen-ai-attributes';
 import { setTokenUsageAttributes } from '../ai/utils';
 import type { AnthropicAiStreamingEvent } from './types';
@@ -32,6 +33,17 @@ interface StreamingState {
   cacheCreationInputTokens: number | undefined;
   /** Number of cache read input tokens used. */
   cacheReadInputTokens: number | undefined;
+  /** Accumulated tool calls (finalized) */
+  toolCalls: Array<Record<string, unknown>>;
+  /** In-progress tool call blocks keyed by index */
+  activeToolBlocks: Record<
+    number,
+    {
+      id?: string;
+      name?: string;
+      inputJsonParts: string[];
+    }
+  >;
 }

 /**
@@ -43,12 +55,7 @@ interface StreamingState {
  * @returns Whether an error occurred
  */
-function isErrorEvent(
-  event: AnthropicAiStreamingEvent,
-  state: StreamingState,
-  recordOutputs: boolean,
-  span: Span,
-): boolean {
+function isErrorEvent(event: AnthropicAiStreamingEvent, span: Span): boolean {
   if ('type' in event && typeof event.type === 'string') {
     // If the event is an error, set the span status and capture the error
     // These error events are not rejected by the API by default, but are sent as metadata of the response
@@ -69,11 +76,6 @@ function isErrorEvent(
       });
       return true;
     }
-
-    if (recordOutputs && event.type === 'content_block_delta') {
-      const text = event.delta?.text;
-      if (text) state.responseTexts.push(text);
-    }
   }
   return false;
 }
@@ -110,6 +112,77 @@ function handleMessageMetadata(event: AnthropicAiStreamingEvent, state: Streamin
   }
 }

+/**
+ * Handle start of a content block (e.g., tool_use)
+ */
+function handleContentBlockStart(event: AnthropicAiStreamingEvent, state: StreamingState): void {
+  if (event.type !== 'content_block_start' || typeof event.index !== 'number' || !event.content_block) return;
+  if (event.content_block.type === 'tool_use' || event.content_block.type === 'server_tool_use') {
+    state.activeToolBlocks[event.index] = {
+      id: event.content_block.id,
+      name: event.content_block.name,
+      inputJsonParts: [],
+    };
+  }
+}
+
+/**
+ * Handle deltas of a content block, including input_json_delta for tool_use
+ */
+function handleContentBlockDelta(
+  event: AnthropicAiStreamingEvent,
+  state: StreamingState,
+  recordOutputs: boolean,
+): void {
+  if (event.type !== 'content_block_delta' || !event.delta) return;
+
+  // Accumulate tool_use input JSON deltas only when we have an index and an active tool block
+  if (
+    typeof event.index === 'number' &&
+    'partial_json' in event.delta &&
+    typeof event.delta.partial_json === 'string'
+  ) {
+    const active = state.activeToolBlocks[event.index];
+    if (active) {
+      active.inputJsonParts.push(event.delta.partial_json);
+    }
+  }
+
+  // Accumulate streamed response text regardless of index
+  if (recordOutputs && typeof event.delta.text === 'string') {
+    state.responseTexts.push(event.delta.text);
+  }
+}
+
+/**
+ * Handle stop of a content block; finalize tool_use entries
+ */
+function handleContentBlockStop(event: AnthropicAiStreamingEvent, state: StreamingState): void {
+  if (event.type !== 'content_block_stop' || typeof event.index !== 'number') return;
+
+  const active = state.activeToolBlocks[event.index];
+  if (!active) return;
+
+  const raw = active.inputJsonParts.join('');
+  let parsedInput: unknown;
+
+  try {
+    parsedInput = raw ? JSON.parse(raw) : {};
+  } catch (error) {
+    parsedInput = { __unparsed: raw };
+  }
+
+  state.toolCalls.push({
+    type: 'tool_use',
+    id: active.id,
+    name: active.name,
+    input: parsedInput,
+  });
+
+  // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
+  delete state.activeToolBlocks[event.index];
+}
+
 /**
  * Processes an event
  * @param event - The event to process
@@ -128,10 +201,19 @@ function processEvent(
     return;
   }

-  const isError = isErrorEvent(event, state, recordOutputs, span);
+  const isError = isErrorEvent(event, span);
   if (isError) return;

   handleMessageMetadata(event, state);
+
+  // Tool call events are sent via 3 separate events:
+  // - content_block_start (start of the tool call)
+  // - content_block_delta (delta aka input of the tool call)
+  // - content_block_stop (end of the tool call)
+  // We need to handle them all to capture the full tool call.
+  handleContentBlockStart(event, state);
+  handleContentBlockDelta(event, state, recordOutputs);
+  handleContentBlockStop(event, state);
 }

 /**
@@ -153,6 +235,8 @@ export async function* instrumentStream(
     completionTokens: undefined,
     cacheCreationInputTokens: undefined,
     cacheReadInputTokens: undefined,
+    toolCalls: [],
+    activeToolBlocks: {},
   };

   try {
@@ -197,6 +281,13 @@ export async function* instrumentStream(
       });
     }
+
+    // Set tool calls if any were captured
+    if (recordOutputs && state.toolCalls.length > 0) {
+      span.setAttributes({
+        [GEN_AI_RESPONSE_TOOL_CALLS_ATTRIBUTE]: JSON.stringify(state.toolCalls),
+      });
+    }

     span.end();
   }
 }
diff --git a/packages/core/src/utils/anthropic-ai/types.ts b/packages/core/src/utils/anthropic-ai/types.ts
index fd533b6795bc..6ab2e790e651 100644
--- a/packages/core/src/utils/anthropic-ai/types.ts
+++ b/packages/core/src/utils/anthropic-ai/types.ts
@@ -16,6 +16,17 @@ export type Message = {
   content: string | unknown[];
 };

+export type ContentBlock = {
+  type: 'tool_use' | 'server_tool_use' | string;
+  text?: string;
+  /** Tool name when type is tool_use */
+  name?: string;
+  /** Tool invocation id when type is tool_use */
+  id?: string;
+  input?: Record<string, unknown>;
+  tool_use_id?: string;
+};
+
 export type AnthropicAiResponse = {
   [key: string]: unknown; // Allow for additional unknown properties
   id: string;
@@ -23,7 +34,7 @@ export type AnthropicAiResponse = {
   created?: number;
   created_at?: number; // Available for Models.retrieve
   messages?: Array<Message>;
-  content?: string; // Available for Messages.create
+  content?: string | Array<ContentBlock>; // Available for Messages.create
   completion?: string; // Available for Completions.create
   input_tokens?: number; // Available for Models.countTokens
   usage?: {
@@ -87,7 +98,14 @@ export type AnthropicAiMessage = {
  * Streaming event type for Anthropic AI
  */
 export type AnthropicAiStreamingEvent = {
-  type: 'message_delta' | 'content_block_start' | 'content_block_delta' | 'content_block_stop' | 'error';
+  type:
+    | 'message_start'
+    | 'message_delta'
+    | 'message_stop'
+    | 'content_block_start'
+    | 'content_block_delta'
+    | 'content_block_stop'
+    | 'error';
   error?: {
     type: string;
     message: string;
@@ -96,9 +114,15 @@ export type AnthropicAiStreamingEvent = {
   delta?: {
     type: unknown;
     text?: string;
+    /** Present for fine-grained tool streaming */
+    partial_json?: string;
+    stop_reason?: string;
+    stop_sequence?: number;
   };
   usage?: {
     output_tokens: number; // Final total output tokens; emitted on the last `message_delta` event
   };
   message?: AnthropicAiMessage;
+  /** Present for fine-grained tool streaming */
+  content_block?: ContentBlock;
 };
From
c4bf630e1cd78b25f0a21971faf7aff23e611db7 Mon Sep 17 00:00:00 2001 From: Rola Abuhasna Date: Thu, 4 Sep 2025 10:34:30 +0200 Subject: [PATCH 2/4] Update packages/core/src/utils/anthropic-ai/streaming.ts Co-authored-by: Andrei <168741329+andreiborza@users.noreply.github.com> --- packages/core/src/utils/anthropic-ai/streaming.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/src/utils/anthropic-ai/streaming.ts b/packages/core/src/utils/anthropic-ai/streaming.ts index 0cecd2aca573..c48dc8a6def7 100644 --- a/packages/core/src/utils/anthropic-ai/streaming.ts +++ b/packages/core/src/utils/anthropic-ai/streaming.ts @@ -168,7 +168,7 @@ function handleContentBlockStop(event: AnthropicAiStreamingEvent, state: Streami try { parsedInput = raw ? JSON.parse(raw) : {}; - } catch (error) { + } catch { parsedInput = { __unparsed: raw }; } From 4603f87f4e799dd0e8147d21cf28234628e82fd3 Mon Sep 17 00:00:00 2001 From: RulaKhaled Date: Thu, 4 Sep 2025 10:43:46 +0200 Subject: [PATCH 3/4] fix lint issue --- .../suites/tracing/anthropic/scenario-stream-tools.mjs | 2 -- .../suites/tracing/anthropic/scenario-tools.mjs | 2 -- 2 files changed, 4 deletions(-) diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs index 8fec227cde06..8f2045ea1dde 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs @@ -106,5 +106,3 @@ async function run() { } run(); - - diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs index 540ed95bfc0c..1637a77c9dd8 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-tools.mjs @@ -62,5 +62,3 @@ async function run() { } run(); - - From 5ce395e3a038c30024c32b04d7fce3cc62860c4c Mon Sep 17 00:00:00 2001 From: RulaKhaled Date: Thu, 4 Sep 2025 11:10:34 +0200 Subject: [PATCH 4/4] lint yelling --- .../tracing/anthropic/instrument-with-pii.mjs | 5 +---- .../suites/tracing/anthropic/instrument.mjs | 5 +---- .../tracing/anthropic/scenario-stream-tools.mjs | 6 +++++- .../suites/tracing/anthropic/scenario-stream.mjs | 2 -- .../suites/tracing/anthropic/scenario.mjs | 16 ++++++---------- 5 files changed, 13 insertions(+), 21 deletions(-) diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument-with-pii.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument-with-pii.mjs index eb8b02b1cf8b..c2776c15b001 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument-with-pii.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument-with-pii.mjs @@ -8,8 +8,5 @@ Sentry.init({ tracesSampleRate: 1.0, sendDefaultPii: true, transport: loggingTransport, - integrations: [ - Sentry.anthropicAIIntegration(), - nodeContextIntegration(), - ], + integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()], }); diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument.mjs index fa011052c50c..39f1506eb2c9 100644 --- 
a/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/instrument.mjs @@ -9,8 +9,5 @@ Sentry.init({ sendDefaultPii: false, transport: loggingTransport, // Force include the integration - integrations: [ - Sentry.anthropicAIIntegration(), - nodeContextIntegration(), - ], + integrations: [Sentry.anthropicAIIntegration(), nodeContextIntegration()], }); diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs index 8f2045ea1dde..8d423fd0bbe0 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream-tools.mjs @@ -21,7 +21,11 @@ function createMockStreamEvents(model = 'claude-3-haiku-20240307') { yield { type: 'content_block_delta', delta: { text: 'Starting tool...' } }; // tool_use streamed via partial json - yield { type: 'content_block_start', index: 0, content_block: { type: 'tool_use', id: 'tool_weather_2', name: 'weather' } }; + yield { + type: 'content_block_start', + index: 0, + content_block: { type: 'tool_use', id: 'tool_weather_2', name: 'weather' }, + }; yield { type: 'content_block_delta', index: 0, delta: { partial_json: '{"city":' } }; yield { type: 'content_block_delta', index: 0, delta: { partial_json: '"Paris"}' } }; yield { type: 'content_block_stop', index: 0 }; diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream.mjs index da70a2b12467..ac5eb6019010 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario-stream.mjs @@ -101,5 +101,3 @@ async function run() { } run(); - - diff --git a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario.mjs b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario.mjs index 425d1366879e..590796931315 100644 --- a/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario.mjs +++ b/dev-packages/node-integration-tests/suites/tracing/anthropic/scenario.mjs @@ -8,7 +8,7 @@ class MockAnthropic { // Create messages object with create and countTokens methods this.messages = { create: this._messagesCreate.bind(this), - countTokens: this._messagesCountTokens.bind(this) + countTokens: this._messagesCountTokens.bind(this), }; this.models = { @@ -56,8 +56,8 @@ class MockAnthropic { // For countTokens, just return input_tokens return { - input_tokens: 15 - } + input_tokens: 15, + }; } async _modelsRetrieve(modelId) { @@ -69,7 +69,7 @@ class MockAnthropic { id: modelId, name: modelId, created_at: 1715145600, - model: modelId, // Add model field to match the check in addResponseAttributes + model: modelId, // Add model field to match the check in addResponseAttributes }; } } @@ -86,9 +86,7 @@ async function run() { await client.messages.create({ model: 'claude-3-haiku-20240307', system: 'You are a helpful assistant.', - messages: [ - { role: 'user', content: 'What is the capital of France?' }, - ], + messages: [{ role: 'user', content: 'What is the capital of France?' 
}], temperature: 0.7, max_tokens: 100, }); @@ -106,9 +104,7 @@ async function run() { // Third test: count tokens with cached tokens await client.messages.countTokens({ model: 'claude-3-haiku-20240307', - messages: [ - { role: 'user', content: 'What is the capital of France?' }, - ], + messages: [{ role: 'user', content: 'What is the capital of France?' }], }); // Fourth test: models.retrieve