
Commit 04eec50

Fix openai#1846 Litellm: fails with function name for tool_choice parameter w/ streaming enabled (openai#1971)
1 parent bacc65b commit 04eec50

File tree

1 file changed: 8 additions, 3 deletions

1 file changed

+8
-3
lines changed

src/agents/extensions/models/litellm_model.py

Lines changed: 8 additions & 3 deletions
@@ -44,6 +44,7 @@
 from ...models.chatcmpl_stream_handler import ChatCmplStreamHandler
 from ...models.fake_id import FAKE_RESPONSES_ID
 from ...models.interface import Model, ModelTracing
+from ...models.openai_responses import Converter as OpenAIResponsesConverter
 from ...tool import Tool
 from ...tracing import generation_span
 from ...tracing.span_data import GenerationSpanData
@@ -367,15 +368,19 @@ async def _fetch_response(
         if isinstance(ret, litellm.types.utils.ModelResponse):
             return ret
 
+        responses_tool_choice = OpenAIResponsesConverter.convert_tool_choice(
+            model_settings.tool_choice
+        )
+        if responses_tool_choice is None or responses_tool_choice is omit:
+            responses_tool_choice = "auto"
+
         response = Response(
             id=FAKE_RESPONSES_ID,
             created_at=time.time(),
             model=self.model,
             object="response",
             output=[],
-            tool_choice=cast(Literal["auto", "required", "none"], tool_choice)
-            if tool_choice is not omit
-            else "auto",
+            tool_choice=responses_tool_choice,  # type: ignore[arg-type]
             top_p=model_settings.top_p,
             temperature=model_settings.temperature,
             tools=[],
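
The change routes model_settings.tool_choice through the Responses Converter before building the placeholder Response object used for streaming, so a specific function name is no longer force-cast to the "auto"/"required"/"none" literals. Below is a rough usage sketch of the scenario this fixes; it assumes the usual openai-agents entry points (Agent, Runner.run_streamed, ModelSettings, function_tool, LitellmModel), and the tool name, model string, and prompt are illustrative, not taken from the commit.

    # Rough sketch (not from the commit) of the scenario this change fixes.
    import asyncio

    from agents import Agent, ModelSettings, Runner, function_tool
    from agents.extensions.models.litellm_model import LitellmModel


    @function_tool
    def get_weather(city: str) -> str:
        """Illustrative tool that the model is forced to call by name."""
        return f"Sunny in {city}"


    agent = Agent(
        name="Assistant",
        instructions="Use the weather tool.",
        tools=[get_weather],
        model=LitellmModel(model="anthropic/claude-3-5-sonnet-20240620"),
        # A concrete function name here (rather than "auto"/"required"/"none")
        # is what previously broke the streaming path.
        model_settings=ModelSettings(tool_choice="get_weather"),
    )


    async def main() -> None:
        result = Runner.run_streamed(agent, input="What's the weather in Tokyo?")
        async for _event in result.stream_events():
            pass  # consuming the stream used to raise before this fix


    asyncio.run(main())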

0 commit comments