Skip to content

Commit

Permalink
Be more resilient with incomplete function tool outputs (#909)
Browse files Browse the repository at this point in the history
  • Loading branch information
theomonnom authored Oct 13, 2024
1 parent 19a6767 commit 09d6991
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 2 deletions.
2 changes: 1 addition & 1 deletion livekit-agents/livekit/agents/llm/chat_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def create_tool_from_called_function(
def create_tool_calls(
called_functions: list[function_context.FunctionCallInfo],
) -> "ChatMessage":
return ChatMessage(role="assistant", tool_calls=called_functions)
return ChatMessage(role="assistant", tool_calls=called_functions, content="")

@staticmethod
def create(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -467,6 +467,7 @@ def chat(
temperature = self._opts.temperature

messages = _build_oai_context(chat_ctx, id(self))

cmp = self._client.chat.completions.create(
messages=messages,
model=self._opts.model,
Expand Down Expand Up @@ -543,7 +544,7 @@ def _parse_choice(self, choice: Choice) -> llm.ChatChunk | None:
if call_chunk is not None:
return call_chunk

if choice.finish_reason == "tool_calls":
if choice.finish_reason in ("tool_calls", "stop") and self._tool_call_id:
# we're done with the tool calls, run the last one
return self._try_run_function(choice)

Expand Down Expand Up @@ -576,6 +577,7 @@ def _try_run_function(self, choice: Choice) -> llm.ChatChunk | None:
fnc_info = llm._oai_api.create_ai_function_info(
self._fnc_ctx, self._tool_call_id, self._fnc_name, self._fnc_raw_arguments
)

self._tool_call_id = self._fnc_name = self._fnc_raw_arguments = None
self._function_calls_info.append(fnc_info)

Expand Down

0 comments on commit 09d6991

Please sign in to comment.