diff --git a/.changeset/happy-shrimps-sell.md b/.changeset/happy-shrimps-sell.md
new file mode 100644
index 00000000..c5a50c1f
--- /dev/null
+++ b/.changeset/happy-shrimps-sell.md
@@ -0,0 +1,6 @@
+---
+'@openai/agents-extensions': patch
+'@openai/agents-core': patch
+---
+
+feat: Add support for Anthropic extended thinking
diff --git a/package.json b/package.json
index 3d1fc563..1a402aa1 100644
--- a/package.json
+++ b/package.json
@@ -71,5 +71,9 @@
     "verdaccio": "^6.2.1",
     "vitest": "^3.2.4"
   },
-  "packageManager": "pnpm@10.20.0"
+  "packageManager": "pnpm@10.20.0",
+  "dependencies": {
+    "@ai-sdk/openai": "^2.0.62",
+    "@openai/agents-extensions": "^0.2.1"
+  }
 }
diff --git a/packages/agents-core/src/types/protocol.ts b/packages/agents-core/src/types/protocol.ts
index 676f1ebc..8f610655 100644
--- a/packages/agents-core/src/types/protocol.ts
+++ b/packages/agents-core/src/types/protocol.ts
@@ -694,6 +694,11 @@ export const StreamEventResponseCompleted = SharedBase.extend({
      * The output from the model.
      */
     output: z.array(OutputModelItem),
+
+    /**
+     * The reasoning/thinking text from the model.
+     */
+    reasoning: z.string().optional(),
   }),
 });

diff --git a/packages/agents-extensions/src/aiSdk.ts b/packages/agents-extensions/src/aiSdk.ts
index 55ffec68..df660f7b 100644
--- a/packages/agents-extensions/src/aiSdk.ts
+++ b/packages/agents-extensions/src/aiSdk.ts
@@ -741,6 +741,7 @@
             : ((result as any).usage?.outputTokens ?? 0)) || 0,
         }),
         output,
+        reasoning: (result as any).reasoning ?? undefined,
         providerData: result,
       } as const;

@@ -874,6 +875,7 @@
       let usageCompletionTokens = 0;
       const functionCalls: Record<string, protocol.FunctionCallItem> = {};
       let textOutput: protocol.OutputText | undefined;
+      let reasoningText: string | undefined;

       for await (const part of stream) {
         if (!started) {
@@ -922,6 +924,16 @@
               : ((part as any).usage?.outputTokens ?? 0);
             break;
           }
+          case 'reasoning-delta': {
+            const reasoningDelta = (part as any).reasoningDelta;
+            if (reasoningDelta) {
+              if (!reasoningText) {
+                reasoningText = '';
+              }
+              reasoningText += reasoningDelta;
+            }
+            break;
+          }
           case 'error': {
             throw part.error;
           }
@@ -953,6 +965,7 @@
             totalTokens: usagePromptTokens + usageCompletionTokens,
           },
           output: outputs,
+          reasoning: reasoningText,
         },
       };

diff --git a/packages/agents-extensions/test/aiSdk.test.ts b/packages/agents-extensions/test/aiSdk.test.ts
index c2357714..64447d9e 100644
--- a/packages/agents-extensions/test/aiSdk.test.ts
+++ b/packages/agents-extensions/test/aiSdk.test.ts
@@ -820,6 +820,115 @@ describe('AiSdkModel.getResponse', () => {
       outputTokensDetails: [],
     });
   });
+
+  test('should store reasoning in response for non-streaming text output', async () => {
+    const mockProviderResult = {
+      content: [{ type: 'text', text: 'This is the final answer.' }],
+      usage: { inputTokens: 10, outputTokens: 20, totalTokens: 30 },
+      providerMetadata: { p: 1 },
+      response: { id: 'fake-id-123' },
+      finishReason: 'stop',
+      warnings: [],
+      reasoning: 'I am thinking about the answer.',
+    };
+
+    const model = new AiSdkModel(
+      stubModel({
+        async doGenerate() {
+          return mockProviderResult as any;
+        },
+      }),
+    );
+
+    const res = await withTrace('t', () =>
+      model.getResponse({
+        input: 'hi',
+        tools: [],
+        handoffs: [],
+        modelSettings: {},
+        outputType: 'text',
+        tracing: false,
+      } as any),
+    );
+
+    expect(res.reasoning).toBeDefined();
+    expect(res.reasoning).toBe(
+      'I am thinking about the answer.',
+    );
+    expect(res.responseId).toBe('fake-id-123');
+  });
+
+  test('should store reasoning in final response_done event for streaming', async () => {
+    async function* mockProviderStream() {
+      yield {
+        type: 'response-metadata',
+        id: 'fake-stream-id-456',
+      };
+
+      yield {
+        type: 'reasoning-delta',
+        reasoningDelta: 'Step 1: I am thinking.',
+      };
+
+      yield {
+        type: 'text-delta',
+        delta: 'Here is the answer.',
+      };
+
+      yield {
+        type: 'reasoning-delta',
+        reasoningDelta: ' Step 2: More thinking.',
+      };
+
+      yield {
+        type: 'finish',
+        usage: { inputTokens: 5, outputTokens: 10, totalTokens: 15 },
+      };
+    }
+
+    const model = new AiSdkModel(
+      stubModel({
+        async doStream() {
+          return {
+            stream: mockProviderStream(),
+          } as any;
+        },
+      }),
+    );
+
+    const stream = model.getStreamedResponse({
+      input: 'hi',
+      tools: [],
+      handoffs: [],
+      modelSettings: {},
+      outputType: 'text',
+      tracing: false,
+    } as any);
+
+    const events = [];
+    for await (const event of stream) {
+      events.push(event);
+    }
+
+    const finalEvent = events.find((e) => e.type === 'response_done') as
+      | protocol.StreamEventResponseCompleted
+      | undefined;
+
+    expect(finalEvent).toBeDefined();
+
+    expect(finalEvent!.response.reasoning).toBeDefined();
+    expect(finalEvent!.response.reasoning).toBe(
+      'Step 1: I am thinking. Step 2: More thinking.',
+    );
+
+    expect(finalEvent!.response.id).toBe('fake-stream-id-456');
+    expect(finalEvent!.response.usage.totalTokens).toBe(15);
+
+    const textOutput = finalEvent!.response.output.find(
+      (o) => o.type === 'message' && o.content[0].type === 'output_text',
+    ) as any;
+    expect(textOutput.content[0].text).toBe('Here is the answer.');
+  });
 });

 describe('AiSdkModel.getStreamedResponse', () => {
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 81c58231..c3baaa3e 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -7,6 +7,13 @@ settings:
 importers:

   .:
+    dependencies:
+      '@ai-sdk/openai':
+        specifier: ^2.0.62
+        version: 2.0.62(zod@3.25.76)
+      '@openai/agents-extensions':
+        specifier: ^0.2.1
+        version: 0.2.1(@openai/agents@0.2.1(ws@8.18.3)(zod@3.25.76))(ws@8.18.3)(zod@3.25.76)
     devDependencies:
       '@changesets/cli':
         specifier: ^2.29.7
@@ -584,6 +591,12 @@
     peerDependencies:
       zod: ^3.25.76 || ^4.1.8

+  '@ai-sdk/openai@2.0.62':
+    resolution: {integrity: sha512-ZHUhUV6yyBBb0bCbuqAkML7nYIOWyXZYbZQ59mlr1TpIJzSHjQzF4BndZHIIieOMm4ZrpZw15Cn78BTyaIAUwQ==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.25.76 || ^4.1.8
+
   '@ai-sdk/provider-utils@2.2.8':
     resolution: {integrity: sha512-fqhG+4sCVv8x7nFzYnFo19ryhAa3w096Kmc3hWxMQfW/TubPOmt3A6tYZhl4mUfQWWQMsuSkLrtjlWuXBVSGQA==}
     engines: {node: '>=18'}
@@ -596,6 +609,12 @@
     peerDependencies:
       zod: ^3.25.76 || ^4.1.8

+  '@ai-sdk/provider-utils@3.0.16':
+    resolution: {integrity: sha512-lsWQY9aDXHitw7C1QRYIbVGmgwyT98TF3MfM8alNIXKpdJdi+W782Rzd9f1RyOfgRmZ08gJ2EYNDhWNK7RqpEA==}
+    engines: {node: '>=18'}
+    peerDependencies:
+      zod: ^3.25.76 || ^4.1.8
+
   '@ai-sdk/provider-utils@3.0.3':
     resolution: {integrity: sha512-kAxIw1nYmFW1g5TvE54ZB3eNtgZna0RnLjPUp1ltz1+t9xkXJIuDT4atrwfau9IbS0BOef38wqrI8CjFfQrxhw==}
     engines: {node: '>=18'}
@@ -1256,6 +1275,36 @@
     resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
     engines: {node: '>= 8'}

+  '@openai/agents-core@0.2.1':
+    resolution: {integrity: sha512-1CYv9UPbCrT2tEj/APA1PseaChJ4Tl1Kqn0y9/ApYLrt+AKiYTJhgOxtv9+Wu9hO9+8ePu3g9Ay1IoEqlBxl+A==}
+    peerDependencies:
+      zod: ^3.25.40 || ^4.0
+    peerDependenciesMeta:
+      zod:
+        optional: true
+
+  '@openai/agents-extensions@0.2.1':
+    resolution: {integrity: sha512-D5EtxWPZjmv763IwUYSrsmgTxnTV8QID1zPBFqDfMdR/VK4zJ/6XvYFjhzA+z3+BT2uQTJUfZuDgs0XXYoZi3Q==}
+    peerDependencies:
+      '@openai/agents': '>=0.0.0'
+      ws: ^8.18.1
+      zod: ^3.25.40 || ^4.0
+
+  '@openai/agents-openai@0.2.1':
+    resolution: {integrity: sha512-2LCPkdXk6aHLwI6mINlYyj/yHYxgshCpZDxhTHIiDbBxWFFBg4bUiJ2KTCH3z7vKKIzm/JFrePvQukMpJoi+XA==}
+    peerDependencies:
+      zod: ^3.25.40 || ^4.0
+
+  '@openai/agents-realtime@0.2.1':
+    resolution: {integrity: sha512-x1F8pwvA4Zz2Xt23IqLOyIFUIFafx27sq5yuGfeDP3MDdr2q59af4fXnoJ6wrNoqvjZiKO+JCdwXyYSFHqQmug==}
+    peerDependencies:
+      zod: ^3.25.40 || ^4.0
+
+  '@openai/agents@0.2.1':
+    resolution: {integrity: sha512-fUwGNZ5jC5OsM5VaFac7mYeF9bxt4EbCkfs7UuwYWsrIz2HuF0zbOLH3pk51GlcWuHmRbnbkWziTI/E+N54IMg==}
+    peerDependencies:
+      zod: ^3.25.40 || ^4.0
+
   '@openrouter/ai-sdk-provider@1.2.0':
     resolution: {integrity: sha512-stuIwq7Yb7DNmk3GuCtz+oS3nZOY4TXEV3V5KsknDGQN7Fpu3KRMQVWRc1J073xKdf0FC9EHOctSyzsACmp5Ag==}
     engines: {node: '>=18'}
@@ -5875,6 +5924,12 @@
       '@ai-sdk/provider-utils': 3.0.12(zod@3.25.76)
       zod: 3.25.76

+  '@ai-sdk/openai@2.0.62(zod@3.25.76)':
+    dependencies:
+      '@ai-sdk/provider': 2.0.0
+      '@ai-sdk/provider-utils': 3.0.16(zod@3.25.76)
+      zod: 3.25.76
+
   '@ai-sdk/provider-utils@2.2.8(zod@3.25.76)':
     dependencies:
       '@ai-sdk/provider': 1.1.3
@@ -5889,6 +5944,13 @@
       eventsource-parser: 3.0.6
       zod: 3.25.76

+  '@ai-sdk/provider-utils@3.0.16(zod@3.25.76)':
+    dependencies:
+      '@ai-sdk/provider': 2.0.0
+      '@standard-schema/spec': 1.0.0
+      eventsource-parser: 3.0.6
+      zod: 3.25.76
+
   '@ai-sdk/provider-utils@3.0.3(zod@3.25.76)':
     dependencies:
       '@ai-sdk/provider': 2.0.0
@@ -6680,6 +6742,65 @@
       '@nodelib/fs.scandir': 2.1.5
       fastq: 1.19.1

+  '@openai/agents-core@0.2.1(ws@8.18.3)(zod@3.25.76)':
+    dependencies:
+      debug: 4.4.3
+      openai: 6.7.0(ws@8.18.3)(zod@3.25.76)
+    optionalDependencies:
+      '@modelcontextprotocol/sdk': 1.20.1
+      zod: 3.25.76
+    transitivePeerDependencies:
+      - supports-color
+      - ws
+
+  '@openai/agents-extensions@0.2.1(@openai/agents@0.2.1(ws@8.18.3)(zod@3.25.76))(ws@8.18.3)(zod@3.25.76)':
+    dependencies:
+      '@ai-sdk/provider': 2.0.0
+      '@openai/agents': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      '@openai/agents-core': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      '@types/ws': 8.18.1
+      debug: 4.4.3
+      ws: 8.18.3
+      zod: 3.25.76
+    transitivePeerDependencies:
+      - supports-color
+
+  '@openai/agents-openai@0.2.1(ws@8.18.3)(zod@3.25.76)':
+    dependencies:
+      '@openai/agents-core': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      debug: 4.4.3
+      openai: 6.7.0(ws@8.18.3)(zod@3.25.76)
+      zod: 3.25.76
+    transitivePeerDependencies:
+      - supports-color
+      - ws
+
+  '@openai/agents-realtime@0.2.1(zod@3.25.76)':
+    dependencies:
+      '@openai/agents-core': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      '@types/ws': 8.18.1
+      debug: 4.4.3
+      ws: 8.18.3
+      zod: 3.25.76
+    transitivePeerDependencies:
+      - bufferutil
+      - supports-color
+      - utf-8-validate
+
+  '@openai/agents@0.2.1(ws@8.18.3)(zod@3.25.76)':
+    dependencies:
+      '@openai/agents-core': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      '@openai/agents-openai': 0.2.1(ws@8.18.3)(zod@3.25.76)
+      '@openai/agents-realtime': 0.2.1(zod@3.25.76)
+      debug: 4.4.3
+      openai: 6.7.0(ws@8.18.3)(zod@3.25.76)
+      zod: 3.25.76
+    transitivePeerDependencies:
+      - bufferutil
+      - supports-color
+      - utf-8-validate
+      - ws
+
   '@openrouter/ai-sdk-provider@1.2.0(ai@5.0.23(zod@3.25.76))(zod@3.25.76)':
     dependencies:
       ai: 5.0.23(zod@3.25.76)