From dc6c085d931867bfcbd63e3fe69c92cf828f0180 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?th=E1=BB=8Bnh?= Date: Tue, 21 Jan 2025 05:27:42 +0700 Subject: [PATCH] Use default temperature (.7) for chat stream --- backend/utils/llm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/utils/llm.py b/backend/utils/llm.py index 692db4f07..d52b07086 100644 --- a/backend/utils/llm.py +++ b/backend/utils/llm.py @@ -28,7 +28,7 @@ llm_large = ChatOpenAI(model='o1-preview') llm_large_stream = ChatOpenAI(model='o1-preview', streaming=True, temperature=1) llm_medium = ChatOpenAI(model='gpt-4o') -llm_medium_stream = ChatOpenAI(model='gpt-4o', streaming=True, temperature=1) +llm_medium_stream = ChatOpenAI(model='gpt-4o', streaming=True) embeddings = OpenAIEmbeddings(model="text-embedding-3-large") parser = PydanticOutputParser(pydantic_object=Structured)