From e3ef79a416bdf58b7acb31854bb8d8ada28368a4 Mon Sep 17 00:00:00 2001
From: Rishabh Srivastava
Date: Thu, 6 Feb 2025 10:19:39 +0800
Subject: [PATCH] change gemini defaults from `2.0-flash-exp` to `2.0-flash`

---
 defog_utils/utils_llm.py      | 4 ++--
 tests/test_utils_multi_llm.py | 6 +++---
 2 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/defog_utils/utils_llm.py b/defog_utils/utils_llm.py
index 4f49da2..2afd9a2 100644
--- a/defog_utils/utils_llm.py
+++ b/defog_utils/utils_llm.py
@@ -408,7 +408,7 @@ async def chat_together_async(
 
 def chat_gemini(
     messages: List[Dict[str, str]],
-    model: str = "gemini-2.0-flash-exp",
+    model: str = "gemini-2.0-flash",
     max_completion_tokens: int = 8192,
     temperature: float = 0.0,
     stop: List[str] = [],
@@ -471,7 +471,7 @@ def chat_gemini(
 
 async def chat_gemini_async(
     messages: List[Dict[str, str]],
-    model: str = "gemini-2.0-flash-exp",
+    model: str = "gemini-2.0-flash",
     max_completion_tokens: int = 8192,
     temperature: float = 0.0,
     stop: List[str] = [],
diff --git a/tests/test_utils_multi_llm.py b/tests/test_utils_multi_llm.py
index eece38a..a1f2a02 100644
--- a/tests/test_utils_multi_llm.py
+++ b/tests/test_utils_multi_llm.py
@@ -189,7 +189,7 @@ async def test_simple_chat_async(self):
             "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
             # "o1-mini", --o1-mini seems to be having issues, and o3-mini will be out soon anyway. so leaving out for now
             "o1",
-            "gemini-2.0-flash-exp",
+            "gemini-2.0-flash",
             # "deepseek-chat",
             # "deepseek-reasoner"
         ]
@@ -217,7 +217,7 @@ async def test_sql_chat_async(self):
             "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
             # "o1-mini", --o1-mini seems to be having issues, and o3-mini will be out soon anyway. so leaving out for now
             "o1",
-            "gemini-2.0-flash-exp",
+            "gemini-2.0-flash",
             # "deepseek-chat",
             # "deepseek-reasoner"
         ]
@@ -261,7 +261,7 @@ async def test_sql_chat_structured_async(self):
         models = [
             "gpt-4o",
             "o1",
-            "gemini-2.0-flash-exp",
+            "gemini-2.0-flash",
         ]
         for model in models:
            response = await chat_async(
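
Note (not part of the patch): a minimal sketch of what the changed default means for callers, assuming the package is importable as `defog_utils` and Gemini credentials are already configured in the environment. Callers that omit `model` now resolve to `gemini-2.0-flash`; pinning the old experimental model explicitly preserves the previous behavior.

# Sketch only; the import path and signature are taken from the diff above.
from defog_utils.utils_llm import chat_gemini

messages = [{"role": "user", "content": "Say hello in one word."}]

# No `model` argument: uses the new default, "gemini-2.0-flash"
# (previously "gemini-2.0-flash-exp").
response = chat_gemini(messages)
print(response)

# Passing the old experimental model explicitly keeps the prior behavior.
response_exp = chat_gemini(messages, model="gemini-2.0-flash-exp")
print(response_exp)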