diff --git a/.changeset/twelve-poets-mate.md b/.changeset/twelve-poets-mate.md
new file mode 100644
index 00000000..3551e1e6
--- /dev/null
+++ b/.changeset/twelve-poets-mate.md
@@ -0,0 +1,5 @@
+---
+'@openai/agents-core': patch
+---
+
+Fixes handling of `agent_updated_stream_event` in run implementation and adds corresponding test coverage.
diff --git a/packages/agents-core/src/run.ts b/packages/agents-core/src/run.ts
index 3169e19d..a0c4c4a4 100644
--- a/packages/agents-core/src/run.ts
+++ b/packages/agents-core/src/run.ts
@@ -824,7 +824,9 @@ export class Runner extends RunHooks<any, AgentOutputType<unknown>> {
             resetCurrentSpan();
           }
           result.state._currentAgentSpan = undefined;
-          result._addItem(new RunAgentUpdatedStreamEvent(currentAgent));
+          result._addItem(
+            new RunAgentUpdatedStreamEvent(result.state._currentAgent),
+          );
           result.state._noActiveAgentRun = true;
 
           // we've processed the handoff, so we need to run the loop again
diff --git a/packages/agents-core/test/run.stream.test.ts b/packages/agents-core/test/run.stream.test.ts
index 2e0d7f1d..f7b29f86 100644
--- a/packages/agents-core/test/run.stream.test.ts
+++ b/packages/agents-core/test/run.stream.test.ts
@@ -4,8 +4,17 @@ import {
   run,
   setDefaultModelProvider,
   setTracingDisabled,
+  Usage,
+  RunStreamEvent,
+  RunAgentUpdatedStreamEvent,
+  handoff,
+  Model,
+  ModelRequest,
+  ModelResponse,
+  StreamEvent,
+  FunctionCallItem,
 } from '../src';
-import { FakeModel, FakeModelProvider } from './stubs';
+import { FakeModel, FakeModelProvider, fakeModelMessage } from './stubs';
 
 // Test for unhandled rejection when stream loop throws
 
@@ -43,4 +52,67 @@ describe('Runner.run (streaming)', () => {
 
     expect((result.error as Error).message).toBe('Not implemented');
   });
+
+  it('emits agent_updated_stream_event with new agent on handoff', async () => {
+    class SimpleStreamingModel implements Model {
+      constructor(private resp: ModelResponse) {}
+      async getResponse(_req: ModelRequest): Promise<ModelResponse> {
+        return this.resp;
+      }
+      async *getStreamedResponse(): AsyncIterable<StreamEvent> {
+        yield {
+          type: 'response_done',
+          response: {
+            id: 'r',
+            usage: {
+              requests: 1,
+              inputTokens: 0,
+              outputTokens: 0,
+              totalTokens: 0,
+            },
+            output: this.resp.output,
+          },
+        } as any;
+      }
+    }
+
+    const agentB = new Agent({
+      name: 'B',
+      model: new SimpleStreamingModel({
+        output: [fakeModelMessage('done B')],
+        usage: new Usage(),
+      }),
+    });
+
+    const callItem: FunctionCallItem = {
+      id: 'h1',
+      type: 'function_call',
+      name: handoff(agentB).toolName,
+      callId: 'c1',
+      status: 'completed',
+      arguments: '{}',
+    };
+
+    const agentA = new Agent({
+      name: 'A',
+      model: new SimpleStreamingModel({
+        output: [callItem],
+        usage: new Usage(),
+      }),
+      handoffs: [handoff(agentB)],
+    });
+
+    const result = await run(agentA, 'hi', { stream: true });
+    const events: RunStreamEvent[] = [];
+    for await (const e of result.toStream()) {
+      events.push(e);
+    }
+    await result.completed;
+
+    const update = events.find(
+      (e): e is RunAgentUpdatedStreamEvent =>
+        e.type === 'agent_updated_stream_event',
+    );
+    expect(update?.agent).toBe(agentB);
+  });
 });