
Commit b2d4fe6

voice: add response_format to ModelSettings and chat()
1 parent 0722371
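This change threads a new response_format parameter through the voice LLM stack: ModelSettings gains a response_format field, the abstract LLM.chat() signature accepts it, and the fallback adapter plus the Anthropic, AWS, LangChain, OpenAI Responses, and fake-test chat() overrides take it as a keyword. The MistralAI plugin, conversely, removes its local forwarding of response_format.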

9 files changed: +19 -4 lines changed

livekit-agents/livekit/agents/llm/fallback_adapter.py

Lines changed: 5 additions & 0 deletions
```diff
@@ -92,6 +92,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_FALLBACK_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream:
         return FallbackLLMStream(
@@ -101,6 +102,7 @@ def chat(
             tools=tools or [],
             parallel_tool_calls=parallel_tool_calls,
             tool_choice=tool_choice,
+            response_format=response_format,
             extra_kwargs=extra_kwargs,
         )

@@ -124,12 +126,14 @@ def __init__(
         conn_options: APIConnectOptions,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> None:
         super().__init__(llm, chat_ctx=chat_ctx, tools=tools, conn_options=conn_options)
         self._fallback_adapter = llm
         self._parallel_tool_calls = parallel_tool_calls
         self._tool_choice = tool_choice
+        self._response_format = response_format
         self._extra_kwargs = extra_kwargs

         self._current_stream: LLMStream | None = None
@@ -164,6 +168,7 @@ async def _try_generate(
                 tools=self._tools,
                 parallel_tool_calls=self._parallel_tool_calls,
                 tool_choice=self._tool_choice,
+                response_format=self._response_format,
                 extra_kwargs=self._extra_kwargs,
                 conn_options=dataclasses.replace(
                     self._conn_options,
```
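With these changes, FallbackLLMStream stores response_format and re-sends it on every attempt, so each candidate LLM sees the same constraint. A minimal usage sketch follows; the plugin constructors, model names, and the FallbackAdapter argument shape are assumptions, and only the response_format pass-through comes from this commit:

```python
# Sketch under assumptions: constructor shapes and model names are
# illustrative, not taken from this diff.
from livekit.agents import llm
from livekit.agents.llm import FallbackAdapter
from livekit.plugins import anthropic, openai


async def ask_json(chat_ctx: llm.ChatContext) -> None:
    fallback_llm = FallbackAdapter(
        [
            openai.LLM(model="gpt-4o-mini"),
            anthropic.LLM(model="claude-3-5-sonnet-latest"),
        ]
    )
    # FallbackLLMStream now stores response_format and re-sends it to
    # whichever provider it falls back to.
    async with fallback_llm.chat(
        chat_ctx=chat_ctx,
        response_format={"type": "json_object"},
    ) as stream:
        async for chunk in stream:
            print(chunk)
```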

livekit-agents/livekit/agents/llm/llm.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -128,6 +128,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream: ...

```
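Because the parameter is typed NotGivenOr[Any] and defaults to NOT_GIVEN, omitting it stays distinguishable from passing None, and the base class does not constrain the value's shape; each plugin decides what to do with it. A sketch of the calling convention, where the dict value is an OpenAI-style assumption rather than anything the base class mandates:

```python
# Sketch: works with any livekit.agents.llm.LLM subclass; the dict shape
# follows OpenAI-style JSON mode and is not mandated by the base class.
from livekit.agents import llm


async def collect(model: llm.LLM, chat_ctx: llm.ChatContext) -> list:
    chunks = []
    # Omitting response_format leaves it NOT_GIVEN, so a plugin can tell
    # "caller said nothing" apart from "caller passed None".
    async with model.chat(
        chat_ctx=chat_ctx,
        response_format={"type": "json_object"},
    ) as stream:
        async for chunk in stream:
            chunks.append(chunk)
    return chunks
```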

livekit-agents/livekit/agents/voice/agent.py

Lines changed: 8 additions & 1 deletion
```diff
@@ -29,6 +29,8 @@
 class ModelSettings:
     tool_choice: NotGivenOr[llm.ToolChoice] = NOT_GIVEN
     """The tool choice to use when calling the LLM."""
+    response_format: NotGivenOr[Any] = NOT_GIVEN
+    """The response format to use when calling the LLM."""


 class Agent:
@@ -409,11 +411,16 @@ async def llm_node(
         )

         tool_choice = model_settings.tool_choice if model_settings else NOT_GIVEN
+        response_format = model_settings.response_format if model_settings else NOT_GIVEN
         activity_llm = activity.llm

         conn_options = activity.session.conn_options.llm_conn_options
         async with activity_llm.chat(
-            chat_ctx=chat_ctx, tools=tools, tool_choice=tool_choice, conn_options=conn_options
+            chat_ctx=chat_ctx,
+            tools=tools,
+            tool_choice=tool_choice,
+            response_format=response_format,
+            conn_options=conn_options,
         ) as stream:
             async for chunk in stream:
                 yield chunk
```
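Because the default llm_node reads response_format off ModelSettings, an agent can opt into structured output from a node override. A sketch, assuming Agent.default.llm_node is the stock delegate and ModelSettings is a mutable dataclass; the import paths follow livekit-agents conventions and are not shown in this diff:

```python
# Sketch under assumptions: import paths and Agent.default.llm_node are taken
# from livekit-agents conventions, not from this diff.
from livekit.agents import Agent
from livekit.agents.voice import ModelSettings


class StructuredAgent(Agent):
    async def llm_node(self, chat_ctx, tools, model_settings: ModelSettings):
        # The default node forwards this value to activity_llm.chat()
        # (see the hunk above), so the LLM is asked for JSON output.
        model_settings.response_format = {"type": "json_object"}
        async for chunk in Agent.default.llm_node(self, chat_ctx, tools, model_settings):
            yield chunk
```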

livekit-plugins/livekit-plugins-anthropic/livekit/plugins/anthropic/llm.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -130,6 +130,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream:
         extra = {}
```

livekit-plugins/livekit-plugins-aws/livekit/plugins/aws/llm.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -131,6 +131,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
         temperature: NotGivenOr[float] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream:
         opts: dict[str, Any] = {}
```

livekit-plugins/livekit-plugins-langchain/livekit/plugins/langchain/langgraph.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -64,6 +64,7 @@ def chat(
         # these are unused, since tool execution takes place in langgraph
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LangGraphStream[ContextT]:
         return LangGraphStream(
```

livekit-plugins/livekit-plugins-mistralai/livekit/plugins/mistralai/llm.py

Lines changed: 0 additions & 3 deletions
```diff
@@ -90,9 +90,6 @@ def chat(
         if is_given(tool_choice):
             extra["tool_choice"] = tool_choice

-        if is_given(response_format):
-            extra["response_format"] = response_format
-
         return LLMStream(
             self,
             model=self._opts.model,
```
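Note the asymmetry: while the other plugins gain the parameter, the MistralAI plugin stops copying response_format into its request extras here. If forwarding is still needed, one possible workaround is extra_kwargs; whether the plugin merges those into the Mistral API request is an assumption, not something this diff shows:

```python
# Hypothetical workaround sketch: assumes extra_kwargs is merged into the
# Mistral API request; verify against the plugin before relying on it.
from livekit.plugins import mistralai


def make_stream(chat_ctx):
    return mistralai.LLM().chat(
        chat_ctx=chat_ctx,
        extra_kwargs={"response_format": {"type": "json_object"}},
    )
```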

livekit-plugins/livekit-plugins-openai/livekit/plugins/openai/responses/llm.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -119,6 +119,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream:
         extra = {}
```
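On the OpenAI Responses side the parameter now exists in the signature, but how the body maps it onto the request is not shown in this diff (the hunk ends at extra = {}). A hedged sketch of what a call might look like; the class path is inferred from the file location above, and the Pydantic-model value shape mirrors the OpenAI SDK's structured outputs rather than anything this commit confirms:

```python
# Sketch under assumptions: the import path follows the file location above,
# and the Pydantic-model shape mirrors the OpenAI SDK's structured outputs.
from pydantic import BaseModel
from livekit.plugins import openai


class Reply(BaseModel):
    answer: str  # illustrative schema


def make_stream(chat_ctx):
    model = openai.responses.LLM(model="gpt-4o")
    return model.chat(chat_ctx=chat_ctx, response_format=Reply)
```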

tests/fake_llm.py

Lines changed: 1 addition & 0 deletions
```diff
@@ -62,6 +62,7 @@ def chat(
         conn_options: APIConnectOptions = DEFAULT_API_CONNECT_OPTIONS,
         parallel_tool_calls: NotGivenOr[bool] = NOT_GIVEN,
         tool_choice: NotGivenOr[ToolChoice] = NOT_GIVEN,
+        response_format: NotGivenOr[Any] = NOT_GIVEN,
         extra_kwargs: NotGivenOr[dict[str, Any]] = NOT_GIVEN,
     ) -> LLMStream:
         return FakeLLMStream(self, chat_ctx=chat_ctx, tools=tools or [], conn_options=conn_options)
```
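The test fake accepts the new keyword and silently drops it, which is enough to keep existing call sites type-checking. A small sketch of what that guarantees, assuming FakeLLM constructs with defaults:

```python
# Sketch: FakeLLM ignores response_format, so passing any value must not raise.
def make_fake_stream(chat_ctx):
    return FakeLLM().chat(
        chat_ctx=chat_ctx,
        response_format={"type": "json_object"},  # accepted, then discarded
    )
```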
