Skip to content

Commit

Permalink
bugfix: skip structured-output parsing (`message.parsed`) for o1-family models ("o1-mini", "o1-preview", "o1") and forward the `model` argument in `chat_async`
Browse files (browse the repository at this point in the history)
  • Loading branch information
rishsriv committed Nov 22, 2024
1 parent 3923e8d commit e49a872
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 2 deletions.
6 changes: 4 additions & 2 deletions defog_utils/utils_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,7 @@ def chat_openai(
sys_msg = messages[0]["content"]
messages = messages[1:]
messages[0]["content"] = sys_msg + messages[0]["content"]

response = client_openai.chat.completions.create(
messages=messages,
model=model,
Expand Down Expand Up @@ -158,7 +159,7 @@ def chat_openai(
print("Empty response")
return None

if response_format:
if response_format and model not in ["o1-mini", "o1-preview", "o1"]:
content = response.choices[0].message.parsed
else:
content = response.choices[0].message.content
Expand Down Expand Up @@ -195,6 +196,7 @@ async def chat_openai_async(
sys_msg = messages[0]["content"]
messages = messages[1:]
messages[0]["content"] = sys_msg + messages[0]["content"]

response = await client_openai.chat.completions.create(
messages=messages,
model=model,
Expand All @@ -221,7 +223,7 @@ async def chat_openai_async(
seed=seed,
)

if response_format:
if response_format and model not in ["o1-mini", "o1-preview", "o1"]:
content = response.choices[0].message.parsed
else:
content = response.choices[0].message.content
Expand Down
1 change: 1 addition & 0 deletions defog_utils/utils_multi_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,7 @@ async def chat_async(
"""
llm_function = map_model_to_chat_fn_async(model)
return await llm_function(
model=model,
messages=messages,
max_completion_tokens=max_completion_tokens,
temperature=temperature,
Expand Down

0 comments on commit e49a872

Please sign in to comment.