From e867e13d1bb06d14fca5176e8bfe4dae2db000a3 Mon Sep 17 00:00:00 2001
From: Simon Willison
Date: Sun, 18 Aug 2024 17:39:09 -0700
Subject: [PATCH] Use model_dump() instead of dict()

Closes #554
---
 llm/default_plugins/openai_models.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/llm/default_plugins/openai_models.py b/llm/default_plugins/openai_models.py
index 28e3e9f7..e8ca566d 100644
--- a/llm/default_plugins/openai_models.py
+++ b/llm/default_plugins/openai_models.py
@@ -325,7 +325,7 @@ def execute(self, prompt, stream, response, conversation=None):
                 stream=False,
                 **kwargs,
             )
-            response.response_json = remove_dict_none_values(completion.dict())
+            response.response_json = remove_dict_none_values(completion.model_dump())
             yield completion.choices[0].message.content
 
     def get_client(self):
@@ -412,7 +412,7 @@ def execute(self, prompt, stream, response, conversation=None):
                 stream=False,
                 **kwargs,
             )
-            response.response_json = remove_dict_none_values(completion.dict())
+            response.response_json = remove_dict_none_values(completion.model_dump())
             yield completion.choices[0].text
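
Editor's note (not part of the patch): the change assumes the completion objects returned
by the openai client are Pydantic v2 models, where .dict() is deprecated in favour of
.model_dump(). A minimal sketch of that rename on a plain Pydantic model; the Completion
class below is a hypothetical stand-in, not the client's actual response type:

    # Sketch only: illustrates the Pydantic v1 -> v2 rename this patch relies on.
    from typing import Optional
    from pydantic import BaseModel

    class Completion(BaseModel):  # hypothetical stand-in for the client's response model
        id: str
        text: Optional[str] = None

    c = Completion(id="cmpl-123")
    print(c.model_dump())  # Pydantic v2 API: {'id': 'cmpl-123', 'text': None}
    # c.dict() still returns the same data in Pydantic v2, but emits a deprecation warning,
    # which is why the patch switches both call sites to model_dump().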