
Commit 89d526c

Merge pull request #9 from vrtnis/codex/fix-test-assertions-for-item-injection
Handle injected inputs during streaming
2 parents af3c9fc + ce6e5ac commit 89d526c

File tree

1 file changed: +15 -0 lines changed

src/agents/run.py

Lines changed: 15 additions & 0 deletions
@@ -1135,6 +1135,7 @@ async def _run_single_turn_streamed(
         model_settings = RunImpl.maybe_reset_tool_choice(agent, tool_use_tracker, model_settings)
 
         final_response: ModelResponse | None = None
+        injected_during_turn = False
 
         input = ItemHelpers.input_to_new_input_list(streamed_result.input)
         input.extend([item.to_input_item() for item in streamed_result.new_items])
@@ -1205,6 +1206,20 @@ async def _run_single_turn_streamed(
 
             streamed_result._event_queue.put_nowait(RawResponsesStreamEvent(data=event))
 
+            # Break early if new items were injected during this turn.
+            if injected and len(injected) > 0:
+                injected_during_turn = True
+                break
+
+        if injected_during_turn and final_response is None:
+            return SingleStepResult(
+                original_input=streamed_result.input,
+                model_response=ModelResponse(output=[], usage=Usage(), response_id=None),
+                pre_step_items=streamed_result.new_items,
+                new_step_items=[],
+                next_step=NextStepRunAgain(),
+            )
+
         # --- NEW: if cancelled during streaming, terminate cleanly ---
         if (
             getattr(streamed_result, "_cancel_token", None)

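In effect, the commit lets a streamed turn stop as soon as items are injected mid-stream and hand control back to the outer run loop via NextStepRunAgain, so the next turn's input includes the injected items. Below is a minimal, self-contained sketch of that control flow under stated assumptions: it is not the library's actual run loop, and run_turn_streamed plus the simplified dataclasses are hypothetical stand-ins for illustration only.

from dataclasses import dataclass


@dataclass
class Usage:
    requests: int = 0


@dataclass
class ModelResponse:
    output: list
    usage: Usage
    response_id: str | None


class NextStepRunAgain:
    pass


@dataclass
class SingleStepResult:
    original_input: list
    model_response: ModelResponse
    pre_step_items: list
    new_step_items: list
    next_step: object


def run_turn_streamed(turn_input: list, injected: list) -> SingleStepResult:
    """Simplified stand-in for a streamed turn: stop early if items were injected."""
    for _event in ("delta-1", "delta-2", "delta-3"):
        if injected:
            # Mirror the early-exit path added in this commit: return an empty
            # model response and ask the caller to run the agent again.
            return SingleStepResult(
                original_input=turn_input,
                model_response=ModelResponse(output=[], usage=Usage(), response_id=None),
                pre_step_items=list(injected),
                new_step_items=[],
                next_step=NextStepRunAgain(),
            )
    raise RuntimeError("normal completion path omitted in this sketch")


# Outer loop: a NextStepRunAgain result means the next turn's input should
# include the items that were injected while the previous stream was open.
turn_input = ["user: summarize the report"]
injected_items = ["user: also include the Q3 numbers"]

step = run_turn_streamed(turn_input, injected_items)
if isinstance(step.next_step, NextStepRunAgain):
    turn_input = turn_input + step.pre_step_items
    print("re-running turn with input:", turn_input)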