Update chatcmpl_stream_handler.py
rm-openai authored Apr 24, 2025
commit 837a2efad83343f4395a886601b6d5a8e804c35c
src/agents/models/chatcmpl_stream_handler.py: 2 additions & 0 deletions
@@ -56,6 +56,7 @@ async def handle_stream(
                     type="response.created",
                 )

+            # This is always set by the OpenAI API, but not by others e.g. LiteLLM
             usage = chunk.usage if hasattr(chunk, "usage") else None

             if not chunk.choices or not chunk.choices[0].delta:
@@ -112,6 +113,7 @@ async def handle_stream(
                 state.text_content_index_and_output[1].text += delta.content

             # Handle refusals (model declines to answer)
+            # This is always set by the OpenAI API, but not by others e.g. LiteLLM
             if hasattr(delta, "refusal") and delta.refusal:
                 if not state.refusal_content_index_and_output:
                     # Initialize a content tracker for streaming refusal text
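
Both hunks apply the same defensive pattern: the OpenAI API always populates `chunk.usage` and `delta.refusal`, but OpenAI-compatible providers such as LiteLLM can emit stream chunks that omit these attributes entirely, so reading them unguarded would raise AttributeError. Below is a minimal runnable sketch of that pattern; the SimpleNamespace objects are hypothetical stand-ins for ChatCompletionChunk objects, not part of this commit.

# A minimal sketch of the hasattr guard these two additions apply, assuming
# stream chunks from an OpenAI-compatible provider (e.g. LiteLLM) that may
# omit attributes the OpenAI API always sets. The SimpleNamespace objects
# below are hypothetical stand-ins for real ChatCompletionChunk objects.
from types import SimpleNamespace

openai_chunk = SimpleNamespace(usage=SimpleNamespace(total_tokens=42))
litellm_chunk = SimpleNamespace()  # no `usage` attribute at all

for chunk in (openai_chunk, litellm_chunk):
    # Without the hasattr guard, reading chunk.usage on the second chunk
    # would raise AttributeError instead of falling back to None.
    usage = chunk.usage if hasattr(chunk, "usage") else None
    print(usage)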