From 55ce04168859aa16a1437e63279cc8fd49cebe0e Mon Sep 17 00:00:00 2001
From: yingying
Date: Wed, 10 Apr 2024 14:04:19 +0800
Subject: [PATCH] feat: add the search tool tavily to the agent

---
 lui/package.json                   |  2 +-
 lui/src/Assistant/index.md         |  2 +-
 lui/src/Assistant/index.tsx        |  2 +-
 lui/src/Chat/index.md              |  2 +-
 lui/src/Chat/index.tsx             | 36 ++++++++++++++++++-----------
 lui/src/StopBtn/index.md           |  2 +-
 lui/src/ThoughtChain/index.md      |  2 +-
 lui/src/ThoughtChain/index.tsx     | 37 +++++-------------------------
 lui/src/mock/inputArea.mock.ts     |  2 +-
 lui/src/services/ChatController.ts |  2 +-
 lui/src/utils/chatTranslator.ts    |  2 +-
 package.json                       |  2 +-
 server/.env.example                |  5 ++--
 server/agent/stream.py             | 18 ++++++++++-----
 server/main.py                     |  6 ++---
 server/requirements.txt            |  1 +
 server/tools/issue.py              |  6 ++---
 server/tools/sourcecode.py         |  6 ++---
 server/uilts/env.py                | 17 ++++++++++++++
 19 files changed, 79 insertions(+), 73 deletions(-)
 create mode 100644 server/uilts/env.py

diff --git a/lui/package.json b/lui/package.json
index 3b25aefa..9fdb05e0 100644
--- a/lui/package.json
+++ b/lui/package.json
@@ -1,6 +1,6 @@
 {
   "name": "petercat-lui",
-  "version": "0.0.3",
+  "version": "0.0.4",
   "description": "A react library developed with dumi",
   "module": "dist/index.js",
   "types": "dist/index.d.ts",
diff --git a/lui/src/Assistant/index.md b/lui/src/Assistant/index.md
index e3cfea88..d668367b 100644
--- a/lui/src/Assistant/index.md
+++ b/lui/src/Assistant/index.md
@@ -6,7 +6,7 @@ atomId: Assistant
 ```tsx
 import React from 'react';
-import { Assistant } from 'lui';
+import { Assistant } from 'petercat-lui';
 
 export default () => (
diff --git a/lui/src/Assistant/index.tsx b/lui/src/Assistant/index.tsx
{
         className="fixed right-0 top-0 h-full flex flex-row z-[999] overflow-hidden text-left text-black bg-gradient-to-r from-f2e9ed via-e9eefb to-f0eeea shadow-[0px_0px_1px_#919eab3d]"
         style={{ width: drawerWidth, zIndex: 9999 }}
       >
-
+
 }
diff --git a/lui/src/Chat/index.md b/lui/src/Chat/index.md
index 4cc26222..303066ff 100644
--- a/lui/src/Chat/index.md
+++ b/lui/src/Chat/index.md
@@ -1,7 +1,7 @@
 ```jsx
 import React from 'react';
 
-import { Chat } from 'lui';
+import { Chat } from 'petercat-lui';
 
 export default () => (
diff --git a/lui/src/Chat/index.tsx b/lui/src/Chat/index.tsx
index 6d7cd151..07c406e1 100644
--- a/lui/src/Chat/index.tsx
+++ b/lui/src/Chat/index.tsx
@@ -6,14 +6,14 @@ import type {
 } from '@ant-design/pro-chat';
 import { ProChat } from '@ant-design/pro-chat';
 import { Markdown } from '@ant-design/pro-editor';
-import StopBtn from 'lui/StopBtn';
-import { theme } from 'lui/Theme';
-import ThoughtChain from 'lui/ThoughtChain';
-import { Role } from 'lui/interface';
-import { BOT_INFO } from 'lui/mock';
-import { streamChat } from 'lui/services/ChatController';
-import { handleStream } from 'lui/utils';
 import React, { ReactNode, memo, useRef, useState, type FC } from 'react';
+import StopBtn from '../StopBtn';
+import { theme } from '../Theme';
+import ThoughtChain from '../ThoughtChain';
+import { Role } from '../interface';
+import { BOT_INFO } from '../mock';
+import { streamChat } from '../services/ChatController';
+import { handleStream } from '../utils';
 import Actions from './inputArea/actions';
 
 const { getDesignToken } = theme;
@@ -23,15 +23,19 @@ export interface ChatProps {
   assistantMeta?: MetaData;
   helloMessage?: string;
   host?: string;
+  drawerWidth?: number;
   slot?: {
     componentID: string;
     renderFunc: (data: any) => React.ReactNode;
   }[];
 }
 
-const Chat: FC = memo(({ helloMessage, host }) => {
+const Chat: FC = memo(({ helloMessage, host, drawerWidth }) => {
   const proChatRef = useRef();
   const [chats, setChats] = useState>[]>();
+  const messageMinWidth = drawerWidth
+    ? `calc(${drawerWidth}px - 90px)`
+    : '100%';
   return (
= memo(({ helloMessage, host }) => {
       },
       contentRender: (props: ChatItemProps, defaultDom: ReactNode) => {
         const originData = props.originData || {};
+        if (originData?.role === Role.user) {
+          return defaultDom;
+        }
         const message = originData.content;
+        const defaultMessageContent = (
+
+          {defaultDom}
+        );
         if (!message || !message.startsWith('')) {
-          return defaultDom;
+          return defaultMessageContent;
         }
 
         const [toolStr, answerStr] = message.split('');
@@ -75,7 +85,7 @@ const Chat: FC = memo(({ helloMessage, host }) => {
 
           if (!match) {
             console.error('No valid JSON found in input');
-            return defaultDom;
+            return defaultMessageContent;
           }
 
           try {
@@ -83,7 +93,7 @@ const Chat: FC = memo(({ helloMessage, host }) => {
             const { type, extra } = config;
 
             if (![Role.knowledge, Role.tool].includes(type)) {
-              return defaultDom;
+              return defaultMessageContent;
             }
             const { status, source } = extra;
 
@@ -91,7 +101,7 @@ const Chat: FC = memo(({ helloMessage, host }) => {
             return (
= memo(({ helloMessage, host }) => {
             );
           } catch (error) {
             console.error(`JSON parse error: ${error}`);
-            return defaultDom;
+            return defaultMessageContent;
           }
         },
       }}
diff --git a/lui/src/StopBtn/index.md b/lui/src/StopBtn/index.md
index c7b71659..3708a60a 100644
--- a/lui/src/StopBtn/index.md
+++ b/lui/src/StopBtn/index.md
@@ -5,7 +5,7 @@ atomId: StopBtn
 # StopBtn
 ``` tsx
 import React from 'react';
-import { StopBtn } from 'lui';
+import { StopBtn } from 'petercat-lui';
 
 export default () => ;
 ```
diff --git a/lui/src/ThoughtChain/index.md b/lui/src/ThoughtChain/index.md
index 609cff18..6e67e66d 100644
--- a/lui/src/ThoughtChain/index.md
+++ b/lui/src/ThoughtChain/index.md
@@ -6,7 +6,7 @@ atomId: ThoughtChain
 ```tsx
 import React from 'react';
-import { ThoughtChain } from 'lui';
+import { ThoughtChain } from 'petercat-lui';
 
 export default () => (
diff --git a/lui/src/ThoughtChain/index.tsx b/lui/src/ThoughtChain/index.tsx
= (params) => {
         ),
-        children: (
-          { const { status: itemStatus, knowledgeName, pluginName, } = (panelProps as IExtraInfo) || {};
-
-            if (itemStatus === Status.loading) {
-              return ;
-            } else if (knowledgeName) {
-              return ;
-            } else if (pluginName) {
-              return ;
-            }
-            return <>;
-          }}
-        >
-          {safeJsonParse(content?.data) ? (
-
-            {JSON.stringify(safeJsonParse(content?.data), null, 2)}
-
-          ) : (
-            <>{content?.data}
-          )}
-
+        children: safeJsonParse(content?.data) ? (
+
+          {JSON.stringify(safeJsonParse(content?.data), null, 2)}
+
+        ) : (
+          <>{content?.data}
         ),
       },
     ];
diff --git a/lui/src/mock/inputArea.mock.ts b/lui/src/mock/inputArea.mock.ts
index 4f452bfd..a73d8645 100644
--- a/lui/src/mock/inputArea.mock.ts
+++ b/lui/src/mock/inputArea.mock.ts
@@ -1,4 +1,4 @@
-import { IBot } from 'lui/interface';
+import { IBot } from '../interface';
 
 export const DEFAULT_HELLO_MESSAGE =
   '我是你的私人助理Kate, 我有许多惊人的能力,比如你可以对我说我想创建一个机器人';
diff --git a/lui/src/services/ChatController.ts b/lui/src/services/ChatController.ts
index 20930987..1859282d 100644
--- a/lui/src/services/ChatController.ts
+++ b/lui/src/services/ChatController.ts
@@ -1,4 +1,4 @@
-import { IPrompt } from 'lui/interface';
+import { IPrompt } from '../interface';
 
 /**
  * Chat api
diff --git a/lui/src/utils/chatTranslator.ts b/lui/src/utils/chatTranslator.ts
index 904de5e5..a98566bc 100644
--- a/lui/src/utils/chatTranslator.ts
+++ b/lui/src/utils/chatTranslator.ts
@@ -1,5 +1,5 @@
 import { map } from 'lodash';
-import { Role } from 'lui/interface';
+import { Role } from '../interface';
 
 export const convertChunkToJson = (rawData: string) => {
   const regex = /data:(.*)/;
diff --git a/package.json b/package.json
index 3df548b3..934f05da 100644
--- a/package.json
+++ b/package.json
@@ -30,7 +30,7 @@
     "axios": "^1.6.7",
     "concurrently": "^8.2.2",
     "dayjs": "^1.11.10",
-    "petercat-lui": "^0.0.3",
+    "petercat-lui": "^0.0.4",
     "eslint": "8.46.0",
     "eslint-config-next": "13.4.12",
     "framer-motion": "^10.16.15",
diff --git a/server/.env.example b/server/.env.example
index 72bde0a3..ce5a272d 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -103,5 +103,6 @@ DOCKER_SOCKET_LOCATION=/var/run/docker.sock
 GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
 GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
 
-# GitHub Access Token
-GITHUB_TOKEN=GITHUB_TOKEN
+
+#TAVILY_API_KEY
+TAVILY_API_KEY=TAVILY_API_KEY
diff --git a/server/agent/stream.py b/server/agent/stream.py
index bc947a1d..fc420599 100644
--- a/server/agent/stream.py
+++ b/server/agent/stream.py
@@ -1,5 +1,6 @@
 import datetime
 import json
+import os
 import uuid
 from langchain.tools import tool
 from typing import AsyncIterator
@@ -12,11 +13,15 @@ from langchain.prompts import MessagesPlaceholder
 from langchain_core.utils.function_calling import convert_to_openai_tool
 from langchain_core.prompts import ChatPromptTemplate
+from langchain.utilities.tavily_search import TavilySearchAPIWrapper
+from langchain.tools.tavily_search import TavilySearchResults
 from langchain_openai import ChatOpenAI
+from uilts.env import get_env_variable
 from tools import issue
 from tools import sourcecode
 from langchain_core.messages import AIMessage, FunctionMessage, HumanMessage
 
+TAVILY_API_KEY = get_env_variable("TAVILY_API_KEY")
 
 prompt = ChatPromptTemplate.from_messages(
     [
@@ -56,10 +61,11 @@ def get_datetime() -> datetime:
 
 TOOLS = ["get_datetime", "create_issue", "get_issues", "search_issues", "search_code"]
 
-def _create_agent_with_tools(openai_api_key: str ) -> AgentExecutor:
-    openai_api_key=openai_api_key
-    llm = ChatOpenAI(model="gpt-4", temperature=0.2, streaming=True)
-    tools = []
+def _create_agent_with_tools(open_api_key: str) -> AgentExecutor:
+    llm = ChatOpenAI(model="gpt-4-1106-preview", temperature=0.2, streaming=True, max_tokens=1500, openai_api_key=open_api_key)
+    search = TavilySearchAPIWrapper()
+    tavily_tool = TavilySearchResults(api_wrapper=search)
+    tools = [tavily_tool]
 
     for requested_tool in TOOLS:
         if requested_tool not in TOOL_MAPPING:
@@ -104,10 +110,10 @@ def chat_history_transform(messages: list[Message]):
 
     return transformed_messages
 
-async def agent_chat(input_data: ChatData, openai_api_key) -> AsyncIterator[str]:
+async def agent_chat(input_data: ChatData, open_api_key: str) -> AsyncIterator[str]:
     try:
         messages = input_data.messages
-        agent_executor = _create_agent_with_tools(openai_api_key)
+        agent_executor = _create_agent_with_tools(open_api_key)
         print(chat_history_transform(messages))
         async for event in agent_executor.astream_events(
             {
diff --git a/server/main.py b/server/main.py
index 57f9586e..b1d88e94 100644
--- a/server/main.py
+++ b/server/main.py
@@ -1,4 +1,3 @@
-import os
 from fastapi import FastAPI
 from fastapi.responses import StreamingResponse
 from fastapi.middleware.cors import CORSMiddleware
@@ -6,9 +5,10 @@ from openai_api import dalle
 from langchain_api import chat
 from agent import stream
+from uilts.env import get_env_variable
 
 import uvicorn
 
-open_api_key = os.getenv("OPENAI_API_KEY")
+open_api_key = get_env_variable("OPENAI_API_KEY")
 
 app = FastAPI(
     title="Bo-meta Server",
@@ -46,4 +46,4 @@ def run_agent_chat(input_data: ChatData):
     return StreamingResponse(result, media_type="text/event-stream")
 
 if __name__ == "__main__":
-    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "8080")))
\ No newline at end of file
+    uvicorn.run(app, host="0.0.0.0", port=int(os.environ.get("PORT", "8080")))
diff --git a/server/requirements.txt b/server/requirements.txt
index dc1dd3d5..c40a37db 100644
--- a/server/requirements.txt
+++ b/server/requirements.txt
@@ -10,3 +10,4 @@ langchain-openai
 PyGithub
 python-multipart
 httpx[socks]
+load_dotenv
diff --git a/server/tools/issue.py b/server/tools/issue.py
index 856a703b..0a4342fc 100644
--- a/server/tools/issue.py
+++ b/server/tools/issue.py
@@ -1,14 +1,12 @@
 import json
-import os
 from typing import Optional
 from github import Github
 from langchain.tools import tool
-
-GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
+from uilts.env import get_env_variable
 
 DEFAULT_REPO_NAME = "ant-design/ant-design"
 
-g = Github(GITHUB_TOKEN)
+g = Github()
 
 @tool
 def create_issue(repo_name, title, body):
diff --git a/server/tools/sourcecode.py b/server/tools/sourcecode.py
index 2f0ba5d6..c0bf304f 100644
--- a/server/tools/sourcecode.py
+++ b/server/tools/sourcecode.py
@@ -1,15 +1,13 @@
-import os
 from typing import List, Optional
 from github import Github
 from github.ContentFile import ContentFile
 from langchain.tools import tool
+from uilts.env import get_env_variable
 
-GITHUB_TOKEN = os.getenv('GITHUB_TOKEN')
-
 DEFAULT_REPO_NAME = "ant-design/ant-design"
 
-g = Github(GITHUB_TOKEN)
+g = Github()
 
 @tool
 def search_code(
diff --git a/server/uilts/env.py b/server/uilts/env.py
new file mode 100644
index 00000000..6d1b8a56
--- /dev/null
+++ b/server/uilts/env.py
@@ -0,0 +1,17 @@
+from dotenv import load_dotenv
+import os
+
+# Define a method to load an environmental variable and return its value
+def get_env_variable(key: str, default=None):
+    """
+    Retrieve the specified environment variable. Return the specified default value if the variable does not exist.
+
+    :param key: The name of the environment variable to retrieve.
+    :param default: The default value to return if the environment variable does not exist.
+    :return: The value of the environment variable, or the default value if it does not exist.
+    """
+    # Load the .env file
+    load_dotenv(verbose=True, override=True)
+
+    # Get the environment variable, returning the default value if it does not exist
+    return os.getenv(key, default)
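
Usage note, not part of the patch: a minimal sketch of how the pieces introduced here fit together, reusing only names from the diff (get_env_variable, TavilySearchAPIWrapper, TavilySearchResults). The explicit tavily_api_key argument is an assumption added for illustration; stream.py itself relies on the variable simply being present in the environment.

# Sketch: wiring the Tavily search tool the way server/agent/stream.py does.
# Assumes TAVILY_API_KEY is set in the environment or in server/.env.
from langchain.utilities.tavily_search import TavilySearchAPIWrapper
from langchain.tools.tavily_search import TavilySearchResults

from uilts.env import get_env_variable

# Loads server/.env via python-dotenv and reads the key, falling back to None.
TAVILY_API_KEY = get_env_variable("TAVILY_API_KEY")

# The wrapper can also pick up TAVILY_API_KEY from the environment on its own;
# passing it explicitly just makes the dependency visible.
search = TavilySearchAPIWrapper(tavily_api_key=TAVILY_API_KEY)
tavily_tool = TavilySearchResults(api_wrapper=search)

# _create_agent_with_tools then seeds the agent's tool list with this tool
# before appending the tools listed in TOOLS.
tools = [tavily_tool]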