diff --git a/src/writer/ai.py b/src/writer/ai.py
index bb260a9ad..569bb63bb 100644
--- a/src/writer/ai.py
+++ b/src/writer/ai.py
@@ -1663,10 +1663,15 @@ def _process_stream_response(
                 chunk |= {"chunk": True}
 
                 # Handling tool call fragments
-                if chunk.get("tool_calls") is not None:
-                    self += chunk
-                    self._process_streaming_tool_calls(chunk)
-                    if chunk_data.get("finish_reason") == "tool_calls":
+                tool_calls_present = chunk.get("tool_calls") is not None
+                tool_calls_need_processing = \
+                    chunk_data.get("finish_reason") == "tool_calls"
+                if tool_calls_present or tool_calls_need_processing:
+                    # Handle tool calls chunks
+                    if tool_calls_present:
+                        self += chunk
+                        self._process_streaming_tool_calls(chunk)
+                    if tool_calls_need_processing:
                         # Send follow-up call to LLM
                         self.messages += self._gather_tool_calls_results()
                         follow_up_response = cast(
@@ -1690,7 +1695,6 @@ def _process_stream_response(
                             )
                         finally:
                             follow_up_response.close()
-
                 else:
                     # Handle regular message chunks
                     if chunk.get("content") is not None:
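
For reviewers, a minimal standalone sketch of the new branching (the route_chunk helper and the list accumulators below are hypothetical illustrations, not the writer.ai API): tool-call fragments and the finish_reason == "tool_calls" signal can arrive in different stream chunks, so the change checks the two conditions independently instead of nesting the finish check under the fragment check.

from typing import List, Optional


def route_chunk(
    chunk: dict,
    finish_reason: Optional[str],
    tool_call_fragments: List[dict],
    contents: List[str],
) -> bool:
    """Return True when the caller should gather tool results and follow up."""
    tool_calls_present = chunk.get("tool_calls") is not None
    tool_calls_need_processing = finish_reason == "tool_calls"

    if tool_calls_present or tool_calls_need_processing:
        if tool_calls_present:
            # Accumulate the tool-call fragment carried by this chunk.
            tool_call_fragments.extend(chunk["tool_calls"])
        # Only the chunk carrying the finish signal triggers the follow-up call.
        return tool_calls_need_processing

    # Chunks with neither flag are regular message content.
    if chunk.get("content") is not None:
        contents.append(chunk["content"])
    return False


if __name__ == "__main__":
    fragments: List[dict] = []
    texts: List[str] = []
    # Fragment-only chunk, bare finish chunk, then a plain text chunk.
    assert not route_chunk({"tool_calls": [{"id": "1"}]}, None, fragments, texts)
    assert route_chunk({}, "tool_calls", fragments, texts)
    assert not route_chunk({"content": "done"}, None, fragments, texts)
    print("fragments:", fragments, "texts:", texts)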