Skip to content

Commit 6d86446

Browse files
authored
OpenAIFunctionWrapper improvements (#21)
* feat: OpenAIFunctionWrapper now supports returning raw function call response without adding it to the LLM convo or call history of the OpenAIFunctionWrapper. --------- Signed-off-by: Trayan Azarov <[email protected]>
1 parent 59b2ed1 commit 6d86446

File tree

2 files changed

+19
-1
lines changed

2 files changed

+19
-1
lines changed

func_ai/utils/llm_tools.py

+18
Original file line numberDiff line numberDiff line change
@@ -531,6 +531,24 @@ def from_response(self, llm_response: dict[str, any]) -> "OpenAIFunctionWrapper"
531531
self.llm_interface.add_conversation_message(_function_call_llm_response)
532532
return self
533533

534+
def from_response_raw(self, llm_response: dict[str, any]) -> any:
    """
    Calls the wrapped function with the arguments supplied by an LLM
    completion response and returns the raw result, WITHOUT adding the
    exchange to the LLM conversation or to this wrapper's call history.

    :param llm_response: completion response message from the LLM
    :return: the raw return value of the wrapped function
    :raises ValueError: if the response contains no ``function_call`` or
        names a function other than this wrapper's
    """
    if "function_call" not in llm_response:
        raise ValueError(f"No function call detected: {llm_response}")
    if llm_response["function_call"]["name"] != self.name:
        raise ValueError(f"Function name does not match: {llm_response}")
    try:
        # Arguments arrive as a JSON-encoded string per the OpenAI
        # function-calling spec; decode and splat them into the call.
        return self.func(**json.loads(llm_response["function_call"]["arguments"]))
    except Exception:
        # logger.exception records the traceback; bare `raise` re-raises
        # the original exception with its context intact.
        logger.exception(f"Failed to process function call: {llm_response}")
        raise
551+
534552
def from_prompt(self, prompt: str, **kwargs) -> "OpenAIFunctionWrapper":
535553
"""
536554
Returns an instance of the class from LLM prompt

pyproject.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "func-ai"
3-
version = "0.0.13"
3+
version = "0.0.14"
44
description = "AI Functional Catalog - OpenAI functions on steroids"
55
authors = ["Trayan Azarov <[email protected]>"]
66
readme = "README.md"

0 commit comments

Comments
 (0)