diff --git a/README.md b/README.md index 0b74778ae..f648146bd 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ Start building LLM-empowered multi-agent applications in an easier way. [![](https://img.shields.io/badge/cs.MA-2402.14034-B31C1C?logo=arxiv&logoColor=B31C1C)](https://arxiv.org/abs/2402.14034) [![](https://img.shields.io/badge/python-3.9+-blue)](https://pypi.org/project/agentscope/) -[![](https://img.shields.io/badge/pypi-v0.0.6a2-blue?logo=pypi)](https://pypi.org/project/agentscope/) +[![](https://img.shields.io/badge/pypi-v0.1.0-blue?logo=pypi)](https://pypi.org/project/agentscope/) [![](https://img.shields.io/badge/Docs-English%7C%E4%B8%AD%E6%96%87-blue?logo=markdown)](https://modelscope.github.io/agentscope/#welcome-to-agentscope-tutorial-hub) [![](https://img.shields.io/badge/Docs-API_Reference-blue?logo=markdown)](https://modelscope.github.io/agentscope/) [![](https://img.shields.io/badge/ModelScope-Demos-4e29ff.svg?logo=data:image/svg+xml;base64,PHN2ZyB2aWV3Qm94PSIwIDAgMjI0IDEyMS4zMyIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KCTxwYXRoIGQ9Im0wIDQ3Ljg0aDI1LjY1djI1LjY1aC0yNS42NXoiIGZpbGw9IiM2MjRhZmYiIC8+Cgk8cGF0aCBkPSJtOTkuMTQgNzMuNDloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzYyNGFmZiIgLz4KCTxwYXRoIGQ9Im0xNzYuMDkgOTkuMTRoLTI1LjY1djIyLjE5aDQ3Ljg0di00Ny44NGgtMjIuMTl6IiBmaWxsPSIjNjI0YWZmIiAvPgoJPHBhdGggZD0ibTEyNC43OSA0Ny44NGgyNS42NXYyNS42NWgtMjUuNjV6IiBmaWxsPSIjMzZjZmQxIiAvPgoJPHBhdGggZD0ibTAgMjIuMTloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzM2Y2ZkMSIgLz4KCTxwYXRoIGQ9Im0xOTguMjggNDcuODRoMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzYyNGFmZiIgLz4KCTxwYXRoIGQ9Im0xOTguMjggMjIuMTloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzM2Y2ZkMSIgLz4KCTxwYXRoIGQ9Im0xNTAuNDQgMHYyMi4xOWgyNS42NXYyNS42NWgyMi4xOXYtNDcuODR6IiBmaWxsPSIjNjI0YWZmIiAvPgoJPHBhdGggZD0ibTczLjQ5IDQ3Ljg0aDI1LjY1djI1LjY1aC0yNS42NXoiIGZpbGw9IiMzNmNmZDEiIC8+Cgk8cGF0aCBkPSJtNDcuODQgMjIuMTloMjUuNjV2LTIyLjE5aC00Ny44NHY0Ny44NGgyMi4xOXoiIGZpbGw9IiM2MjRhZmYiIC8+Cgk8cGF0aCBkPSJtNDcuODQgNzMuNDloLTIyLjE5djQ3Ljg0aDQ3Ljg0di0yMi4xOWgtMjUuNjV6IiBmaWxsPSIjNjI0YWZmIiAvPgo8L3N2Zz4K)](https://modelscope.cn/studios?name=agentscope&page=1&sort=latest) @@ -220,23 +220,41 @@ cd agentscope pip install -e . ``` -- To build distributed multi-agent applications: +### Using pip + +- Install AgentScope from pip: ```bash -# On windows -pip install -e .[distribute] -# On mac -pip install -e .\[distribute\] +pip install agentscope ``` -### Using pip +### Extra Dependencies + +To support different deployment scenarios, AgentScope provides several +optional dependencies. 
Full list of optional dependencies refers to +[tutorial](https://doc.agentscope.io/en/tutorial/102-installation.html) +Taking distribution mode as an example, you can install its dependencies +as follows: -- Install AgentScope from pip: + +#### On Windows ```bash -pip install agentscope --pre +# From source +pip install -e .[distribute] +# From pypi +pip install agentscope[distribute] ``` +#### On Mac & Linux +```bash +# From source +pip install -e .\[distribute\] +# From pypi +pip install agentscope\[distribute\] +``` + + ## Quick Start ### Configuration diff --git a/README_ZH.md b/README_ZH.md index 059f9b624..8c3be6469 100644 --- a/README_ZH.md +++ b/README_ZH.md @@ -10,7 +10,7 @@ [![](https://img.shields.io/badge/cs.MA-2402.14034-B31C1C?logo=arxiv&logoColor=B31C1C)](https://arxiv.org/abs/2402.14034) [![](https://img.shields.io/badge/python-3.9+-blue)](https://pypi.org/project/agentscope/) -[![](https://img.shields.io/badge/pypi-v0.0.6a2-blue?logo=pypi)](https://pypi.org/project/agentscope/) +[![](https://img.shields.io/badge/pypi-v0.1.0-blue?logo=pypi)](https://pypi.org/project/agentscope/) [![](https://img.shields.io/badge/Docs-English%7C%E4%B8%AD%E6%96%87-blue?logo=markdown)](https://modelscope.github.io/agentscope/#welcome-to-agentscope-tutorial-hub) [![](https://img.shields.io/badge/Docs-API_Reference-blue?logo=markdown)](https://modelscope.github.io/agentscope/) [![](https://img.shields.io/badge/ModelScope-Demos-4e29ff.svg?logo=data:image/svg+xml;base64,PHN2ZyB2aWV3Qm94PSIwIDAgMjI0IDEyMS4zMyIgeG1sbnM9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIj4KCTxwYXRoIGQ9Im0wIDQ3Ljg0aDI1LjY1djI1LjY1aC0yNS42NXoiIGZpbGw9IiM2MjRhZmYiIC8+Cgk8cGF0aCBkPSJtOTkuMTQgNzMuNDloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzYyNGFmZiIgLz4KCTxwYXRoIGQ9Im0xNzYuMDkgOTkuMTRoLTI1LjY1djIyLjE5aDQ3Ljg0di00Ny44NGgtMjIuMTl6IiBmaWxsPSIjNjI0YWZmIiAvPgoJPHBhdGggZD0ibTEyNC43OSA0Ny44NGgyNS42NXYyNS42NWgtMjUuNjV6IiBmaWxsPSIjMzZjZmQxIiAvPgoJPHBhdGggZD0ibTAgMjIuMTloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzM2Y2ZkMSIgLz4KCTxwYXRoIGQ9Im0xOTguMjggNDcuODRoMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzYyNGFmZiIgLz4KCTxwYXRoIGQ9Im0xOTguMjggMjIuMTloMjUuNjV2MjUuNjVoLTI1LjY1eiIgZmlsbD0iIzM2Y2ZkMSIgLz4KCTxwYXRoIGQ9Im0xNTAuNDQgMHYyMi4xOWgyNS42NXYyNS42NWgyMi4xOXYtNDcuODR6IiBmaWxsPSIjNjI0YWZmIiAvPgoJPHBhdGggZD0ibTczLjQ5IDQ3Ljg0aDI1LjY1djI1LjY1aC0yNS42NXoiIGZpbGw9IiMzNmNmZDEiIC8+Cgk8cGF0aCBkPSJtNDcuODQgMjIuMTloMjUuNjV2LTIyLjE5aC00Ny44NHY0Ny44NGgyMi4xOXoiIGZpbGw9IiM2MjRhZmYiIC8+Cgk8cGF0aCBkPSJtNDcuODQgNzMuNDloLTIyLjE5djQ3Ljg0aDQ3Ljg0di0yMi4xOWgtMjUuNjV6IiBmaWxsPSIjNjI0YWZmIiAvPgo8L3N2Zz4K)](https://modelscope.cn/studios?name=agentscope&page=1&sort=latest) @@ -208,21 +208,35 @@ cd agentscope pip install -e . 
``` -- 构建分布式多智能体应用需要按照以下方式安装: +### 使用pip + +- 从pip安装的AgentScope ```bash -# 在windows上 -pip install -e .[distribute] -# 在mac上 -pip install -e .\[distribute\] +pip install agentscope ``` -### 使用pip +### 额外依赖 -- 从pip安装的AgentScope +为了支持不同的部署场景,AgentScope提供了若干个可选的依赖项。 +完整的可选依赖项列表请参考[tutorial](https://doc.agentscope.io/en/tutorial/102-installation.html) +以分布式模式为例,可以使用以下命令安装AgentScope: +#### On Windows + +```bash +# From source +pip install -e .[distribute] +# From pypi +pip install agentscope[distribute] +``` + +#### On Mac & Linux ```bash -pip install agentscope --pre +# From source +pip install -e .\[distribute\] +# From pypi +pip install agentscope\[distribute\] ``` ## 快速开始 diff --git a/docs/sphinx_doc/en/source/tutorial/102-installation.md b/docs/sphinx_doc/en/source/tutorial/102-installation.md index e1f16d305..a39b1113c 100644 --- a/docs/sphinx_doc/en/source/tutorial/102-installation.md +++ b/docs/sphinx_doc/en/source/tutorial/102-installation.md @@ -41,9 +41,7 @@ If you prefer to install AgentScope from Pypi, you can do so easily using `pip`: ```bash # For centralized multi-agent applications -pip install agentscope --pre -# For distributed multi-agent applications -pip install agentscope[distribute] --pre # On Mac use `pip install agentscope\[distribute\] --pre` +pip install agentscope ``` ### Install from Source @@ -59,10 +57,33 @@ cd agentscope # For centralized multi-agent applications pip install -e . -# For distributed multi-agent applications -pip install -e .[distribute] # On Mac use `pip install -e .\[distribute\]` ``` -**Note**: The `[distribute]` option installs additional dependencies required for distributed applications. Remember to activate your virtual environment before running these commands. +### Extra Dependencies + +The supported optional dependencies for AgentScope are list as follows: + +- ollama: Ollama API +- litellm: Litellm API +- zhipuai: Zhipuai API +- gemini: Gemini API +- service: The dependencies for different service functions +- distribute: The dependencies for distribution mode +- full: All the dependencies + +You can install one or more of these dependencies by adding them to the installation command. + +#### Windows +```bash +pip install agentscope[gemini] +# or +pip install agentscope[ollama,distribute] +``` +#### Mac & Linux +```bash +pip install agentscope\[gemini\] +# or +pip install agentscope\[ollama,distribute\] +``` [[Return to the top]](#102-installation-en) diff --git a/docs/sphinx_doc/zh_CN/source/tutorial/102-installation.md b/docs/sphinx_doc/zh_CN/source/tutorial/102-installation.md index ad1ac3659..14e8b7c35 100644 --- a/docs/sphinx_doc/zh_CN/source/tutorial/102-installation.md +++ b/docs/sphinx_doc/zh_CN/source/tutorial/102-installation.md @@ -48,22 +48,45 @@ cd agentscope # 针对本地化的multi-agent应用 pip install -e . 
-# 为分布式multi-agent应用 -pip install -e .[distribute] # 在Mac上使用`pip install -e .\[distribute\]` ``` -**注意**:`[distribute]`选项安装了分布式应用程序所需的额外依赖项。在运行这些命令之前,请激活您的虚拟环境。 - ### 使用Pip安装 如果您选择从Pypi安装AgentScope,可以使用`pip`轻松地完成: ```bash # 针对本地化的multi-agent应用 -pip install agentscope --pre +pip install agentscope +``` + +### 额外依赖 + +AgentScope 支持可选依赖如下,用户可以根据自己的需求选择安装: + +- ollama: Ollama API +- litellm: Litellm API +- zhipuai: Zhipuai API +- gemini: Gemini API +- service: 不同工具函数的依赖 +- distribute: 分布式模式的依赖 +- full: 一次性安装上述所有的依赖,可能耗时较长 + +可以通过将它们添加到安装命令中来安装这些依赖。 -# 为分布式multi-agent应用 -pip install agentscope[distribute] --pre # 在Mac上使用`pip install agentscope\[distribute\] --pre` +#### Windows + +```bash +pip install agentscope[gemini] +# or +pip install agentscope[ollama,distribute] +``` + +#### Mac & Linux + +```bash +pip install agentscope\[gemini\] +# or +pip install agentscope\[ollama,distribute\] ``` [[返回顶端]](#102-installation-zh) diff --git a/setup.py b/setup.py index 798d6713f..cd577d5b8 100644 --- a/setup.py +++ b/setup.py @@ -17,50 +17,6 @@ NAME = "agentscope" URL = "https://github.com/modelscope/agentscope" -rpc_requires = [ - "grpcio==1.60.0", - "grpcio-tools==1.60.0", - "protobuf==4.25.0", - "expiringdict", - "dill", - "psutil", -] - -service_requires = [ - "docker", - "pymongo", - "pymysql", - "bs4", - "beautifulsoup4", - "feedparser", -] - -doc_requires = [ - "sphinx", - "sphinx-autobuild", - "sphinx_rtd_theme", - "myst-parser", - "sphinxcontrib-mermaid", -] - -test_requires = ["pytest", "pytest-cov", "pre-commit"] - -gradio_requires = [ - "gradio==4.19.1", - "modelscope_studio==0.0.5", -] - -rag_requires = [ - "llama-index==0.10.30", -] - -studio_requires = [] - -web_requires = [ - "playwright", - "markdownify", -] - # released requires minimal_requires = [ "networkx", @@ -71,47 +27,85 @@ "tiktoken", "Pillow", "requests", - "chardet", "inputimeout", - "openai>=1.3.0", "numpy", "Flask==3.0.0", "Flask-Cors==4.0.0", "Flask-SocketIO==5.3.6", "flask_sqlalchemy", "flake8", - # TODO: move into other requires - "dashscope==1.14.1", + "psutil", + "scipy", + # Leaving openai and dashscope here as default supports "openai>=1.3.0", - "ollama>=0.1.7", - "google-generativeai>=0.4.0", - "zhipuai", - "litellm", + "dashscope==1.14.1", +] + +extra_service_requires = [ + "docker", + "pymongo", + "pymysql", + "bs4", + "beautifulsoup4", + "feedparser", "notebook", "nbclient", "nbformat", - "psutil", - "scipy", - "pillow", + "playwright", + "markdownify", +] + +extra_distribute_requires = [ + "grpcio==1.60.0", + "grpcio-tools==1.60.0", + "protobuf==4.25.0", + "expiringdict", + "dill", +] + +extra_dev_requires = [ + # unit test + "pytest", + "pytest-cov", + "pre-commit", + # doc + "sphinx", + "sphinx-autobuild", + "sphinx_rtd_theme", + "myst-parser", + "sphinxcontrib-mermaid", ] -distribute_requires = minimal_requires + rpc_requires +extra_gradio_requires = [ + "gradio==4.19.1", + "modelscope_studio==0.0.5", +] -dev_requires = minimal_requires + test_requires +extra_rag_requires = [ + "llama-index==0.10.30", +] -full_requires = ( - minimal_requires - + rpc_requires - + service_requires - + doc_requires - + test_requires - + gradio_requires - + rag_requires - + studio_requires - + web_requires +# API requires +extra_gemini_requires = ["google-generativeai>=0.4.0"] +extra_litellm_requires = ["litellm"] +extra_zhipuai_requires = ["zhipuai"] +extra_ollama_requires = ["ollama>=0.1.7"] + +# Full requires +extra_full_requires = ( + extra_distribute_requires + + extra_service_requires + + extra_dev_requires + + 
extra_gradio_requires + + extra_rag_requires + + extra_gemini_requires + + extra_litellm_requires + + extra_zhipuai_requires + + extra_ollama_requires ) -online_requires = full_requires + [ +# For online workstation +extra_online_requires = extra_full_requires + [ "oss2", "flask_babel", "babel==2.15.0", @@ -141,10 +135,21 @@ }, install_requires=minimal_requires, extras_require={ - "distribute": distribute_requires, - "dev": dev_requires, - "full": full_requires, - "online": online_requires, + # For specific LLM API + "ollama": extra_ollama_requires, + "litellm": extra_litellm_requires, + "zhipuai": extra_zhipuai_requires, + "gemini": extra_gemini_requires, + # For service functions + "service": extra_service_requires, + # For distribution mode + "distribute": extra_distribute_requires, + # With unit test requires + "dev": extra_dev_requires, + # With full requires + "full": extra_full_requires, + # With online workstation requires + "online": extra_online_requires, }, license="Apache License 2.0", classifiers=[ diff --git a/src/agentscope/agents/__init__.py b/src/agentscope/agents/__init__.py index 65d86b278..8deaeca3a 100644 --- a/src/agentscope/agents/__init__.py +++ b/src/agentscope/agents/__init__.py @@ -5,7 +5,6 @@ from .dialog_agent import DialogAgent from .dict_dialog_agent import DictDialogAgent from .user_agent import UserAgent -from .rpc_agent import RpcAgent from .react_agent import ReActAgent from .rag_agent import LlamaIndexAgent @@ -18,6 +17,5 @@ "UserAgent", "ReActAgent", "DistConf", - "RpcAgent", "LlamaIndexAgent", ] diff --git a/src/agentscope/models/dashscope_model.py b/src/agentscope/models/dashscope_model.py index ba50b9f40..5d53f4d55 100644 --- a/src/agentscope/models/dashscope_model.py +++ b/src/agentscope/models/dashscope_model.py @@ -5,7 +5,6 @@ from http import HTTPStatus from typing import Any, Union, List, Sequence, Optional, Generator -from dashscope.api_entities.dashscope_response import GenerationResponse from loguru import logger from ..manager import FileManager @@ -14,8 +13,10 @@ try: import dashscope + from dashscope.api_entities.dashscope_response import GenerationResponse except ImportError: dashscope = None + GenerationResponse = None from .model import ModelWrapperBase, ModelResponse @@ -52,7 +53,8 @@ def __init__( if dashscope is None: raise ImportError( - "Cannot find dashscope package in current python environment.", + "The package 'dashscope' is not installed. Please install it " + "by running `pip install dashscope==1.14.1`", ) self.generate_args = generate_args or {} @@ -725,7 +727,7 @@ def __call__( messages=messages, **kwargs, ) - # Unhandle code path here + # Unhandled code path here # response could be a generator , if stream is yes # suggest add a check here if response.status_code != HTTPStatus.OK: diff --git a/src/agentscope/models/ollama_model.py b/src/agentscope/models/ollama_model.py index ec87f219f..55cc5083c 100644 --- a/src/agentscope/models/ollama_model.py +++ b/src/agentscope/models/ollama_model.py @@ -7,11 +7,6 @@ from ..models import ModelWrapperBase, ModelResponse from ..utils.common import _convert_to_str -try: - import ollama -except ImportError: - ollama = None - class OllamaWrapperBase(ModelWrapperBase, ABC): """The base class for Ollama model wrappers. @@ -67,6 +62,15 @@ def __init__( self.options = options self.keep_alive = keep_alive + + try: + import ollama + except ImportError as e: + raise ImportError( + "The package ollama is not found. 
Please install it by " + 'running command `pip install "ollama>=0.1.7"`', + ) from e + self.client = ollama.Client(host=host, **kwargs) diff --git a/src/agentscope/service/execute_code/exec_notebook.py b/src/agentscope/service/execute_code/exec_notebook.py index f296c41b0..a9ffcd285 100644 --- a/src/agentscope/service/execute_code/exec_notebook.py +++ b/src/agentscope/service/execute_code/exec_notebook.py @@ -1,23 +1,19 @@ # -*- coding: utf-8 -*- # pylint: disable=C0301 """Service for executing jupyter notebooks interactively -Partially referenced the implementation of https://github.com/geekan/MetaGPT/blob/main/metagpt/actions/di/execute_nb_code.py -""" # noqa +Partially referenced the implementation of +https://github.com/geekan/MetaGPT/blob/main/metagpt/actions/di/execute_nb_code.py +""" import base64 import asyncio from loguru import logger - try: - from nbclient import NotebookClient - from nbclient.exceptions import CellTimeoutError, DeadKernelError + import nbclient import nbformat -except ImportError as import_error: - from agentscope.utils.common import ImportErrorReporter - - nbclient = ImportErrorReporter(import_error) - nbformat = ImportErrorReporter(import_error) - NotebookClient = ImportErrorReporter(import_error) +except ImportError: + nbclient = None + nbformat = None from ...manager import FileManager from ..service_status import ServiceExecStatus @@ -66,8 +62,15 @@ def __init__( The timeout for each cell execution. Default to 300. """ + + if nbclient is None or nbformat is None: + raise ImportError( + "The package nbclient or nbformat is not found. Please " + "install it by `pip install notebook nbclient nbformat`", + ) + self.nb = nbformat.v4.new_notebook() - self.nb_client = NotebookClient(nb=self.nb) + self.nb_client = nbclient.NotebookClient(nb=self.nb) self.timeout = timeout asyncio.run(self._start_client()) @@ -113,7 +116,7 @@ async def _kill_client(self) -> None: async def _restart_client(self) -> None: """Restart the notebook client""" await self._kill_client() - self.nb_client = NotebookClient(self.nb, timeout=self.timeout) + self.nb_client = nbclient.NotebookClient(self.nb, timeout=self.timeout) await self._start_client() async def _run_cell(self, cell_index: int) -> ServiceResponse: @@ -127,13 +130,13 @@ async def _run_cell(self, cell_index: int) -> ServiceResponse: for output in self.nb.cells[cell_index].outputs ], ) - except DeadKernelError: + except nbclient.exceptions.DeadKernelError: await self.reset() return ServiceResponse( status=ServiceExecStatus.ERROR, content="DeadKernelError when executing cell, reset kernel", ) - except CellTimeoutError: + except nbclient.exceptions.CellTimeoutError: assert self.nb_client.km is not None await self.nb_client.km.interrupt_kernel() return ServiceResponse( diff --git a/src/agentscope/service/multi_modality/dashscope_services.py b/src/agentscope/service/multi_modality/dashscope_services.py index d3963bbc7..d372b597e 100644 --- a/src/agentscope/service/multi_modality/dashscope_services.py +++ b/src/agentscope/service/multi_modality/dashscope_services.py @@ -9,17 +9,11 @@ import os -import dashscope -from dashscope.audio.tts import SpeechSynthesizer - -from agentscope.models import ( +from ...models import ( DashScopeImageSynthesisWrapper, DashScopeMultiModalWrapper, ) -# SpeechSynthesizerWrapper is current not available - - from ..service_response import ( ServiceResponse, ServiceExecStatus, @@ -259,9 +253,17 @@ def dashscope_text_to_audio( > {'status': 'SUCCESS', 'content': {"audio_path": "AUDIO_PATH"}} """ + try: + 
import dashscope + except ImportError as e: + raise ImportError( + "The package 'dashscope' is not installed. Please install it by " + "running `pip install dashscope==1.14.1`", + ) from e + dashscope.api_key = api_key - res = SpeechSynthesizer.call( + res = dashscope.audio.tts.SpeechSynthesizer.call( model=model, text=text, sample_rate=sample_rate, diff --git a/src/agentscope/web/gradio/utils.py b/src/agentscope/web/gradio/utils.py index 5da255ed2..4b7285e65 100644 --- a/src/agentscope/web/gradio/utils.py +++ b/src/agentscope/web/gradio/utils.py @@ -10,8 +10,6 @@ from PIL import Image -from dashscope.audio.asr import RecognitionCallback, Recognition - SYS_MSG_PREFIX = "【SYSTEM】" thread_local_data = threading.local() @@ -170,7 +168,15 @@ def generate_image_from_name(name: str) -> str: def audio2text(audio_path: str) -> str: """Converts audio file at the given path to text using ASR.""" - # dashscope.api_key = "" + + try: + from dashscope.audio.asr import RecognitionCallback, Recognition + except ImportError as e: + raise ImportError( + "The package dashscope is not found. Please install it by " + "running `pip install dashscope==1.14.1`", + ) from e + callback = RecognitionCallback() rec = Recognition( model="paraformer-realtime-v1", diff --git a/tests/dashscope_services_test.py b/tests/dashscope_services_test.py index 198325e7f..ebad64e7b 100644 --- a/tests/dashscope_services_test.py +++ b/tests/dashscope_services_test.py @@ -235,12 +235,7 @@ def test_dashscope_image_to_text_failure( self.assertEqual(results.status, expected_result.status) self.assertEqual(results.content, expected_result.content) - @patch( - ( - "agentscope.service.multi_modality.dashscope_services." - "SpeechSynthesizer" - ), - ) + @patch("dashscope.audio.tts.SpeechSynthesizer") @patch("agentscope.service.multi_modality.dashscope_services.os.makedirs") @patch( "agentscope.service.multi_modality.dashscope_services.os.path.exists", diff --git a/tests/ollama_test.py b/tests/ollama_test.py index e00e49ba0..7e03ed658 100644 --- a/tests/ollama_test.py +++ b/tests/ollama_test.py @@ -80,13 +80,13 @@ def setUp(self) -> None: "eval_duration": 223689000, } - @patch("agentscope.models.ollama_model.ollama") - def test_ollama_chat(self, mock_ollama: MagicMock) -> None: + @patch("ollama.Client") + def test_ollama_chat(self, mock_ollama_client: MagicMock) -> None: """Unit test for ollama chat API.""" # prepare the mock - mock_ollama_client = MagicMock() - mock_ollama.Client.return_value = mock_ollama_client - mock_ollama_client.chat.return_value = self.dummy_response + mock_client_instance = MagicMock() + mock_ollama_client.return_value = mock_client_instance + mock_client_instance.chat.return_value = self.dummy_response # run test agentscope.init( @@ -109,13 +109,13 @@ def test_ollama_chat(self, mock_ollama: MagicMock) -> None: self.assertEqual(response.raw, self.dummy_response) - @patch("agentscope.models.ollama_model.ollama") - def test_ollama_embedding(self, mock_ollama: MagicMock) -> None: + @patch("ollama.Client") + def test_ollama_embedding(self, mock_ollama_client: MagicMock) -> None: """Unit test for ollama embeddings API.""" # prepare the mock - mock_ollama_client = MagicMock() - mock_ollama.Client.return_value = mock_ollama_client - mock_ollama_client.embeddings.return_value = self.dummy_embedding + mock_client_instance = MagicMock() + mock_ollama_client.return_value = mock_client_instance + mock_client_instance.embeddings.return_value = self.dummy_embedding # run test agentscope.init( @@ -138,13 +138,13 @@ def 
test_ollama_embedding(self, mock_ollama: MagicMock) -> None: self.assertEqual(response.raw, self.dummy_embedding) - @patch("agentscope.models.ollama_model.ollama") - def test_ollama_generate(self, mock_ollama: MagicMock) -> None: + @patch("ollama.Client") + def test_ollama_generate(self, mock_ollama_client: MagicMock) -> None: """Unit test for ollama generate API.""" # prepare the mock - mock_ollama_client = MagicMock() - mock_ollama.Client.return_value = mock_ollama_client - mock_ollama_client.generate.return_value = self.dummy_generate + mock_client_instance = MagicMock() + mock_ollama_client.return_value = mock_client_instance + mock_client_instance.generate.return_value = self.dummy_generate # run test agentscope.init( diff --git a/tests/rpc_agent_test.py b/tests/rpc_agent_test.py index bda005882..90f21b163 100644 --- a/tests/rpc_agent_test.py +++ b/tests/rpc_agent_test.py @@ -22,7 +22,7 @@ from agentscope.msghub import msghub from agentscope.pipelines import sequentialpipeline from agentscope.rpc.rpc_agent_client import RpcAgentClient -from agentscope.agents import RpcAgent +from agentscope.agents.rpc_agent import RpcAgent from agentscope.exception import AgentCallError, QuotaExceededError
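
The source-level changes above all follow one pattern: optional third-party packages (ollama, dashscope, nbclient/nbformat, and friends) are no longer imported at module load time; the import is deferred into `__init__` or the call site, and a failed import raises an `ImportError` that names the exact install command for the matching extra declared in `setup.py`. The sketch below illustrates that pattern only; the package name `foo` and the class `FooChatWrapper` are hypothetical stand-ins invented for this example and are not part of AgentScope.

```python
# Minimal sketch of the deferred-import pattern adopted in this diff.
# "foo" and FooChatWrapper are hypothetical; they stand in for optional
# backends such as ollama or dashscope.


class FooChatWrapper:
    """A model wrapper whose optional dependency is imported lazily."""

    def __init__(self, host: str = "http://localhost:11434") -> None:
        try:
            # Import only when the wrapper is actually instantiated, so
            # `import agentscope` keeps working without the extra installed.
            import foo
        except ImportError as e:
            raise ImportError(
                "The package foo is not installed. Please install it by "
                "running `pip install agentscope[foo]`",
            ) from e

        # The import succeeded, so it is safe to build the client here.
        self.client = foo.Client(host=host)
```

Because the client is now created from the real top-level package rather than a module-level alias, the unit tests patch `ollama.Client` directly instead of `agentscope.models.ollama_model.ollama`, which is exactly the change made in `tests/ollama_test.py`.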