Skip to content

Commit

Permalink
update examples
Browse files Browse the repository at this point in the history
  • Loading branch information
garylin2099 committed Mar 12, 2024
1 parent 38f2113 commit a680a1a
Show file tree
Hide file tree
Showing 11 changed files with 32 additions and 16 deletions.
26 changes: 22 additions & 4 deletions examples/di/crawl_webpage.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,31 @@

from metagpt.roles.di.data_interpreter import DataInterpreter

# Request: scrape the ICLR 2024 `paperlist` table and save titles matching
# the given keywords to CSV.
# Fix: the opener was `""""` (four quotes), which made the string value begin
# with a stray `"` character; a triple-quoted string needs exactly three.
PAPER_LIST_REQ = """
Get data from `paperlist` table in https://papercopilot.com/statistics/iclr-statistics/iclr-2024-statistics/,
and save it to a csv file. paper title must include `multiagent` or `large language model`. *notice: print key variables*
"""

# Request: scrape first-page product data (name, price, product URL, image URL)
# from a demo e-commerce site and export it to CSV. The text is a runtime
# prompt consumed by the agent, so its wording is part of program behavior.
ECOMMERCE_REQ = """
Get products data from website https://scrapeme.live/shop/ and save it as a csv file.
**Notice: Firstly parse the web page encoding and the text HTML structure;
The first page product name, price, product URL, and image URL must be saved in the csv;**
"""

# Request (Chinese): crawl startup-financing news flashes ("快讯") from the 36kr
# PitchHub page; save the raw HTML, print a sample around the 7th occurrence of
# the keyword, derive a regex for title/link/time, filter to the last 3 days,
# and save all results to a local CSV. The string is a runtime prompt for the
# agent and must remain in Chinese — do not translate it.
NEWS_36KR_REQ = """从36kr创投平台https://pitchhub.36kr.com/financing-flash 所有初创企业融资的信息, **注意: 这是一个中文网站**;
下面是一个大致流程, 你会根据每一步的运行结果对当前计划中的任务做出适当调整:
1. 爬取并本地保存html结构;
2. 直接打印第7个*`快讯`*关键词后2000个字符的html内容, 作为*快讯的html内容示例*;
3. 反思*快讯的html内容示例*中的规律, 设计正则匹配表达式来获取*`快讯`*的标题、链接、时间;
4. 筛选最近3天的初创企业融资*`快讯`*, 以list[dict]形式打印前5个。
5. 将全部结果存在本地csv中
"""


async def main():
    """Run a DataInterpreter with the Playwright scraping tool on ECOMMERCE_REQ.

    Swap in PAPER_LIST_REQ or NEWS_36KR_REQ to try the other prepared requests.
    """
    # The span previously contained both the pre- and post-change lines of a
    # diff (an inline `prompt` string plus `use_tools=True`, alongside the
    # updated calls); only the updated version is kept.
    di = DataInterpreter(tools=["scrape_web_playwright"])
    await di.run(ECOMMERCE_REQ)


if __name__ == "__main__":
Expand Down
2 changes: 1 addition & 1 deletion examples/di/data_visualization.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@


async def main(requirement: str = ""):
    """Run a default DataInterpreter (no extra tools) on *requirement*."""
    # The diff left both the old `use_tools=False` construction and the updated
    # no-argument one; only the updated call is kept.
    di = DataInterpreter()
    await di.run(requirement)


Expand Down
2 changes: 1 addition & 1 deletion examples/di/email_summary.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ async def main():
Firstly, Please help me fetch the latest 5 senders and full letter contents.
Then, summarize each of the 5 emails into one sentence (you can do this by yourself, no need to import other models to do this) and output them in a markdown format."""

di = DataInterpreter(use_tools=True)
di = DataInterpreter()

await di.run(prompt)

Expand Down
2 changes: 1 addition & 1 deletion examples/di/imitate_webpage.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ async def main():
Secondly, convert image to a webpage including HTML, CSS and JS in one go.
Finally, save webpage in a text file.
Note: All required dependencies and environments have been fully installed and configured."""
di = DataInterpreter(use_tools=True)
di = DataInterpreter(tools=["GPTvGenerator"])

await di.run(prompt)

Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
import asyncio

from metagpt.roles.di.ml_engineer import MLEngineer
from metagpt.roles.di.data_interpreter import DataInterpreter


async def main(requirement: str):
    """Solve an ML requirement with a DataInterpreter allowed to use all registered tools."""
    # The diff left both the old `MLEngineer(auto_run=True, use_tools=True)`
    # construction and its replacement; only the updated DataInterpreter
    # version is kept. "<all>" enables every tool in the registry.
    role = DataInterpreter(tools=["<all>"])
    await role.run(requirement)


Expand Down
2 changes: 1 addition & 1 deletion examples/di/rm_image_background.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@


async def main(requirement: str = ""):
    """Run a default DataInterpreter (no extra tools) on *requirement*."""
    # The diff left both the old `use_tools=False` construction and the updated
    # no-argument one; only the updated call is kept.
    di = DataInterpreter()
    await di.run(requirement)


Expand Down
2 changes: 1 addition & 1 deletion examples/di/sd_tool_usage.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@


async def main(requirement: str = ""):
    """Run a DataInterpreter equipped with the Stable Diffusion tool on *requirement*."""
    # The diff left both the old `use_tools=True, goal=requirement`
    # construction and its replacement; only the updated `tools=["SDEngine"]`
    # version is kept.
    di = DataInterpreter(tools=["SDEngine"])
    await di.run(requirement)


Expand Down
2 changes: 1 addition & 1 deletion examples/di/solve_math_problems.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@


async def main(requirement: str = ""):
    """Run a default DataInterpreter (no extra tools) on *requirement*."""
    # The diff left both the old `use_tools=False` construction and the updated
    # no-argument one; only the updated call is kept.
    di = DataInterpreter()
    await di.run(requirement)


Expand Down
2 changes: 1 addition & 1 deletion metagpt/roles/di/data_interpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from metagpt.actions.di.execute_nb_code import ExecuteNbCode
from metagpt.actions.di.write_analysis_code import CheckData, WriteCodeWithTools
from metagpt.logs import logger
from metagpt.prompts.mi.write_analysis_code import DATA_INFO
from metagpt.prompts.di.write_analysis_code import DATA_INFO
from metagpt.roles import Role
from metagpt.schema import Message, Task, TaskResult
from metagpt.strategy.task_type import TaskType
Expand Down
1 change: 0 additions & 1 deletion metagpt/tools/libs/sd_engine.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,6 @@
from aiohttp import ClientSession
from PIL import Image, PngImagePlugin

#
from metagpt.const import SD_OUTPUT_FILE_REPO, SOURCE_ROOT
from metagpt.logs import logger
from metagpt.tools.tool_registry import register_tool
Expand Down
3 changes: 1 addition & 2 deletions metagpt/tools/tool_recommend.py
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,7 @@ async def run(self, prompt):
class ToolRecommender(BaseModel):
"""
The default ToolRecommender:
1. Recall: If plan exists, use exact match between task type and tool type to recall tools;
If plan doesn't exist (e.g. we use ReAct), return all user-specified tools;
1. Recall: To be implemented in subclasses. Recall tools based on the given context and plan.
2. Rank: Use LLM to select final candidates from recalled set.
"""

Expand Down

0 comments on commit a680a1a

Please sign in to comment.