Skip to content
Merged
4 changes: 4 additions & 0 deletions .github/workflows/integration-runner.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,10 @@ jobs:
run-suffix: deepseek_run
llm-config:
model: litellm_proxy/deepseek/deepseek-chat
- name: Kimi K2 Thinking
run-suffix: kimi_k2_run
llm-config:
model: litellm_proxy/moonshot/kimi-k2-thinking
steps:
- name: Checkout repository
uses: actions/checkout@v5
Expand Down
47 changes: 47 additions & 0 deletions openhands-sdk/openhands/sdk/llm/message.py
Original file line number Diff line number Diff line change
Expand Up @@ -269,6 +269,7 @@ def to_chat_dict(self) -> dict[str, Any]:
# Assistant function_call(s)
if self.role == "assistant" and self.tool_calls:
message_dict["tool_calls"] = [tc.to_chat_dict() for tc in self.tool_calls]
self._remove_content_if_empty(message_dict)

# Tool result (observation) threading
if self.role == "tool" and self.tool_call_id is not None:
Expand Down Expand Up @@ -331,6 +332,52 @@ def _list_serializer(self) -> dict[str, Any]:
# tool call keys are added in to_chat_dict to centralize behavior
return message_dict

def _remove_content_if_empty(self, message_dict: dict[str, Any]) -> None:
"""Remove empty text content entries from assistant tool-call messages.

Mutates the provided message_dict in-place:
- If content is a string of only whitespace, drop the 'content' key
- If content is a list, remove any text items with empty text; if the list
becomes empty, drop the 'content' key
"""
if "content" not in message_dict:
return

content = message_dict["content"]

if isinstance(content, str):
if content.strip() == "":
message_dict.pop("content", None)
return

if isinstance(content, list):
normalized: list[Any] = []
for item in content:
if not isinstance(item, dict):
normalized.append(item)
continue

if item.get("type") == "text":
text_value = item.get("text", "")
if isinstance(text_value, str):
if text_value.strip() == "":
continue
else:
raise ValueError(
f"Text content item has non-string text value: "
f"{text_value!r}"
)

normalized.append(item)

if normalized:
message_dict["content"] = normalized
else:
message_dict.pop("content", None)
return

# Any other content shape is left as-is

def to_responses_value(self, *, vision_enabled: bool) -> str | list[dict[str, Any]]:
"""Return serialized form.

Expand Down
39 changes: 39 additions & 0 deletions tests/sdk/llm/test_message.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,45 @@ def test_message_with_tool_calls():
assert result["tool_calls"][0]["function"]["arguments"] == '{"arg": "value"}'


def test_message_tool_calls_drop_empty_string_content():
    """An assistant tool-call message with no text must omit 'content' entirely."""
    from openhands.sdk.llm.message import Message, MessageToolCall

    call = MessageToolCall(
        id="call_empty",
        name="test_function",
        arguments="{}",
        origin="completion",
    )

    serialized = Message(
        role="assistant", content=[], tool_calls=[call]
    ).to_chat_dict()

    assert "content" not in serialized


def test_message_tool_calls_strip_blank_list_content():
    """Blank text blocks in list-serialized tool-call messages must be dropped."""
    from openhands.sdk.llm.message import Message, MessageToolCall, TextContent

    call = MessageToolCall(
        id="call_blank_list",
        name="test_function",
        arguments="{}",
        origin="completion",
    )
    msg = Message(
        role="assistant",
        content=[TextContent(text="")],
        tool_calls=[call],
        function_calling_enabled=True,
    )

    serialized = msg.to_chat_dict()

    assert "content" not in serialized


def test_message_from_llm_chat_message_function_role_error():
"""Test Message.from_llm_chat_message with function role raises error."""
from litellm.types.utils import Message as LiteLLMMessage
Expand Down
Loading