Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion src/agents/extensions/models/litellm_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,14 @@
from ...usage import Usage


class InternalChatCompletionMessage(ChatCompletionMessage):
    """
    An internal subclass to carry reasoning_content without modifying the original model.

    Used when converting a LiteLLM response message so that reasoning ("thinking")
    text from providers that emit it survives the conversion to the OpenAI
    ChatCompletionMessage shape.
    """

    # Reasoning text produced by the model; set to "" when the provider
    # supplied none (the converter defaults it rather than omitting the field).
    reasoning_content: str


class LitellmModel(Model):
    """This class enables using any model via LiteLLM. LiteLLM allows you to access OpenAI,
    Anthropic, Gemini, Mistral, and many other models.
Expand Down Expand Up @@ -364,13 +372,18 @@ def convert_message_to_openai(
provider_specific_fields.get("refusal", None) if provider_specific_fields else None
)

return ChatCompletionMessage(
reasoning_content = ""
if hasattr(message, "reasoning_content") and message.reasoning_content:
reasoning_content = message.reasoning_content

return InternalChatCompletionMessage(
content=message.content,
refusal=refusal,
role="assistant",
annotations=cls.convert_annotations_to_openai(message),
audio=message.get("audio", None), # litellm deletes audio if not present
tool_calls=tool_calls,
reasoning_content=reasoning_content,
)

@classmethod
Expand Down
13 changes: 12 additions & 1 deletion src/agents/models/chatcmpl_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
ResponseOutputRefusal,
ResponseOutputText,
ResponseReasoningItem,
ResponseReasoningItemParam,
)
from openai.types.responses.response_input_param import FunctionCallOutput, ItemReference, Message
from openai.types.responses.response_reasoning_item import Summary
Expand Down Expand Up @@ -210,6 +211,12 @@ def maybe_response_output_message(cls, item: Any) -> ResponseOutputMessageParam
return cast(ResponseOutputMessageParam, item)
return None

@classmethod
def maybe_reasoning_message(cls, item: Any) -> ResponseReasoningItemParam | None:
    """Narrow *item* to a reasoning item param.

    Returns the item (cast for the type checker) when it is a dict tagged with
    ``type == "reasoning"``; returns None for anything else.
    """
    looks_like_reasoning = isinstance(item, dict) and item.get("type") == "reasoning"
    return cast(ResponseReasoningItemParam, item) if looks_like_reasoning else None

@classmethod
def extract_text_content(
cls, content: str | Iterable[ResponseInputContentParam]
Expand Down Expand Up @@ -459,7 +466,11 @@ def ensure_assistant_message() -> ChatCompletionAssistantMessageParam:
f"Encountered an item_reference, which is not supported: {item_ref}"
)

# 7) If we haven't recognized it => fail or ignore
# 7) reasoning message => not handled
elif cls.maybe_reasoning_message(item):
pass

# 8) If we haven't recognized it => fail or ignore
else:
raise UserError(f"Unhandled item type or structure: {item}")

Expand Down