Coverage for src/harness_utils/conversion/to_model.py: 60%
58 statements
« prev ^ index » next coverage.py v7.13.2, created at 2026-01-31 13:47 -0600
"""Convert internal messages to model format.

Transforms internal message format (granular, storage-optimized)
to model format (LLM-compatible).
"""

from typing import Any

from harness_utils.models.message import Message
from harness_utils.models.parts import ToolPart
def to_model_messages(messages: list[Message]) -> list[dict[str, Any]]:
    """Convert internal messages to model format.

    Handles:
    - Compacted tool outputs (replace with marker)
    - Interrupted tool calls (inject error)
    - Two-part system prompts
    - Stop at summary message

    Args:
        messages: Internal message objects

    Returns:
        List of messages in model format
    """
    # Walk newest-to-oldest so we can stop at the most recent summary:
    # once a summarizing assistant message is seen, everything at or before
    # the user message that prompted it (its parent) is redundant history.
    collected: list[dict[str, Any]] = []
    summary_found = False
    summary_parent_id: str | None = None

    for msg in reversed(messages):
        if summary_found and msg.role == "user" and msg.id == summary_parent_id:
            break

        if msg.role == "assistant" and msg.summary:
            summary_found = True
            summary_parent_id = msg.parent_id

        if not msg.parts:
            continue

        if msg.role == "user":
            model_msg = _convert_user_message(msg)
        elif msg.role == "assistant":
            model_msg = _convert_assistant_message(msg)
        else:
            # Roles other than user/assistant are not represented in
            # model format.
            continue

        # Converters return None (or empty content) for messages with
        # nothing to send; skip those.
        if model_msg and model_msg.get("content"):
            # Append while walking backwards, then reverse once at the end.
            # The previous repeated insert(0, ...) was O(n^2).
            collected.append(model_msg)

    collected.reverse()
    return collected
def _convert_user_message(msg: Message) -> dict[str, Any] | None:
    """Convert a user message to model format.

    Text parts flagged as ignored are dropped; compaction parts are
    replaced by a fixed recap prompt.

    Args:
        msg: User message

    Returns:
        Model format message, or None when nothing remains to send
    """
    pieces: list[str] = []

    for item in msg.parts:
        if item.type == "compaction":
            pieces.append("What did we do so far?")
        elif item.type == "text" and not getattr(item, "ignored", False):
            pieces.append(item.text)

    if not pieces:
        return None

    return {"role": "user", "content": "\n".join(pieces)}
def _convert_assistant_message(msg: Message) -> dict[str, Any] | None:
    """Convert an assistant message to model format.

    Args:
        msg: Assistant message

    Returns:
        Model format message or None if empty
    """
    # A failed message with no partial output contributes nothing.
    if msg.error and not msg.has_partial_output():
        return None

    rendered: list[str] = []

    for part in msg.parts:
        kind = part.type

        if kind == "text":
            rendered.append(part.text)
            continue

        if kind == "reasoning":
            # Reasoning is summarized by size, not replayed verbatim.
            rendered.append(f"[Extended thinking: {len(part.text)} chars]")
            continue

        if kind != "tool":
            continue

        assert isinstance(part, ToolPart)
        status = part.state.status

        if status == "completed":
            # Compacted outputs were cleared from storage; emit a marker
            # instead of the (missing) original output.
            if part.state.time and part.state.time.compacted:
                result = "[Old tool result content cleared]"
            else:
                result = part.state.output
            rendered.append(f"[Tool: {part.tool}] {part.state.title}\n{result}")
        elif status == "error":
            rendered.append(f"[Tool Error: {part.tool}] {part.state.error}")
        elif status in ("pending", "running"):
            # The run ended before this tool call finished.
            rendered.append(f"[Tool execution was interrupted: {part.tool}]")

    if not rendered:
        return None

    return {
        "role": "assistant",
        "content": "\n\n".join(rendered),
    }