From e49a87206db63e6412fa1c0ba2be730d288e478c Mon Sep 17 00:00:00 2001
From: Rishabh Srivastava
Date: Fri, 22 Nov 2024 14:55:21 +0800
Subject: [PATCH] bugfix: skip .parsed for o1 models and forward model in chat_async

o1 models are not routed through the structured-output
(beta.chat.completions.parse) code path, so their response message has
no .parsed attribute. Read .content for them even when a
response_format is supplied, instead of trying to return parsed output
that does not exist.

Also forward the model argument from chat_async to the
provider-specific async chat function, which previously never
received it.
---
 defog_utils/utils_llm.py       | 6 ++++--
 defog_utils/utils_multi_llm.py | 1 +
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/defog_utils/utils_llm.py b/defog_utils/utils_llm.py
index f12ff9e..d96bf71 100644
--- a/defog_utils/utils_llm.py
+++ b/defog_utils/utils_llm.py
@@ -126,6 +126,7 @@ def chat_openai(
         sys_msg = messages[0]["content"]
         messages = messages[1:]
         messages[0]["content"] = sys_msg + messages[0]["content"]
+
         response = client_openai.chat.completions.create(
             messages=messages,
             model=model,
@@ -158,7 +159,7 @@ def chat_openai(
         print("Empty response")
         return None
 
-    if response_format:
+    if response_format and model not in ["o1-mini", "o1-preview", "o1"]:
         content = response.choices[0].message.parsed
     else:
         content = response.choices[0].message.content
@@ -195,6 +196,7 @@ async def chat_openai_async(
         sys_msg = messages[0]["content"]
         messages = messages[1:]
         messages[0]["content"] = sys_msg + messages[0]["content"]
+
         response = await client_openai.chat.completions.create(
             messages=messages,
             model=model,
@@ -221,7 +223,7 @@ async def chat_openai_async(
                 seed=seed,
             )
 
-    if response_format:
+    if response_format and model not in ["o1-mini", "o1-preview", "o1"]:
         content = response.choices[0].message.parsed
     else:
         content = response.choices[0].message.content
diff --git a/defog_utils/utils_multi_llm.py b/defog_utils/utils_multi_llm.py
index a6d18c3..afd37b0 100644
--- a/defog_utils/utils_multi_llm.py
+++ b/defog_utils/utils_multi_llm.py
@@ -66,6 +66,7 @@ async def chat_async(
     """
     llm_function = map_model_to_chat_fn_async(model)
     return await llm_function(
+        model=model,
         messages=messages,
         max_completion_tokens=max_completion_tokens,
         temperature=temperature,
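
Note (editorial, not part of the commit): below is a minimal sketch of the call path this patch fixes, assuming only the signatures visible in the diff (`chat_async` in `defog_utils.utils_multi_llm` accepting `model` and `messages` keywords). The prompt and the printed result are hypothetical illustrations, not output from the library.

```python
# Minimal sketch, not part of the patch: exercises both fixes, assuming
# chat_async's keyword arguments as shown in the diff hunks above.
import asyncio

from defog_utils.utils_multi_llm import chat_async


async def main():
    # Fix 1: chat_async now forwards model= to the provider-specific
    # function; previously the keyword was dropped, so the provider
    # function fell back to its own default model.
    # Fix 2: o1 models are called via chat.completions.create rather than
    # the structured-output parse() path, so their message object has no
    # .parsed attribute; chat_openai now reads .content for them even
    # when a response_format is configured.
    result = await chat_async(
        model="o1-mini",  # hypothetical choice; any o1-family model applies
        messages=[{"role": "user", "content": "What is 2 + 2?"}],
    )
    print(result)


asyncio.run(main())
```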