-
Notifications
You must be signed in to change notification settings - Fork 42
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- 支持换模型 `gemini-1.5-flash`
- Loading branch information
Showing
17 changed files
with
203 additions
and
65 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,6 +1,6 @@ | ||
[tool.poetry] | ||
name = "petercat_utils" | ||
version = "0.1.28" | ||
version = "0.1.30" | ||
description = "" | ||
authors = ["raoha.rh <[email protected]>"] | ||
readme = "README.md" | ||
|
@@ -24,7 +24,7 @@ md_report_color = "auto" | |
python = "^3.8" | ||
langchain_community = "^0.2.11" | ||
langchain_openai = "^0.1.20" | ||
langchain_core = "0.2.28" | ||
langchain_core = "^0.2.28" | ||
langchain = "^0.2.12" | ||
supabase = "2.6.0" | ||
pydantic = "2.7.0" | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
from typing import Optional | ||
|
||
from agent.llm.base import BaseLLMClient | ||
from agent.llm.gemini import GeminiClient | ||
from agent.llm.openai import OpenAIClient | ||
from petercat_utils.utils.env import get_env_variable | ||
|
||
OPEN_API_KEY = get_env_variable("OPENAI_API_KEY") | ||
GEMINI_API_KEY = get_env_variable("GEMINI_API_KEY") | ||
|
||
def get_llm( | ||
llm: str = 'openai', | ||
temperature: Optional[int] = 0.2, | ||
max_tokens: Optional[int] = 1500, | ||
streaming: Optional[bool] = False | ||
) -> BaseLLMClient: | ||
|
||
match llm: | ||
case "openai": | ||
return OpenAIClient(temperature=temperature, streaming=streaming, max_tokens=max_tokens) | ||
case "gemini": | ||
return GeminiClient(temperature=temperature,streaming=streaming, max_tokens=max_tokens) | ||
return None |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,22 @@ | ||
|
||
from abc import abstractmethod | ||
from typing import Any, Dict, List, Optional | ||
from langchain_core.language_models import BaseChatModel | ||
|
||
from petercat_utils.data_class import MessageContent | ||
|
||
class BaseLLMClient():
    """Abstract interface shared by provider-specific LLM clients.

    NOTE(review): @abstractmethod is not enforced here because the class does
    not use abc.ABCMeta/abc.ABC; subclasses are nonetheless expected to
    override every method.
    """

    def __init__(self, temperature: Optional[float] = 0.2, max_tokens: Optional[int] = 1500, streaming: Optional[bool] = False):
        # The base class holds no state; each subclass builds and stores its
        # own chat-model instance from these parameters.
        pass

    @abstractmethod
    def get_client(self) -> BaseChatModel:
        """Return the underlying LangChain chat model.

        Fix: the stub was declared without ``self`` although it is an
        instance method (subclasses define it with ``self``).
        """
        pass

    @abstractmethod
    def get_tools(self, tool: List[Any]) -> List[Dict[str, Any]]:
        """Convert the given tools into the provider's function-calling format.

        Fix: ``list[...]`` (PEP 585) fails at class-definition time on
        Python 3.8, which the package still supports; use ``typing.List``.
        """
        pass

    @abstractmethod
    def parse_content(self, content: List[MessageContent]) -> List[MessageContent]:
        """Normalize message content blocks into the provider's expected shape."""
        pass
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,40 @@ | ||
from typing import Any, List, Optional | ||
from langchain_google_genai import ChatGoogleGenerativeAI | ||
from langchain_google_genai.chat_models import convert_to_genai_function_declarations | ||
from langchain_openai import ChatOpenAI | ||
|
||
from agent.llm.base import BaseLLMClient | ||
from petercat_utils.data_class import ImageRawURLContentBlock, MessageContent | ||
from petercat_utils.utils.env import get_env_variable | ||
|
||
GEMINI_API_KEY = get_env_variable("GEMINI_API_KEY") | ||
|
||
def parse_gemini_input(message: MessageContent):
    """Adapt one message content block for Gemini.

    An OpenAI-style ``image_url`` block is rewritten into the raw-URL form;
    every other block type is passed through unchanged.
    """
    if message.type == "image_url":
        return ImageRawURLContentBlock(image_url=message.image_url.url, type="image_url")
    return message
|
||
class GeminiClient(BaseLLMClient):
    """LLM client wrapper backed by Google Gemini (``gemini-1.5-flash``).

    Fix: the class attribute was annotated ``ChatOpenAI`` — a copy-paste
    error from the OpenAI client; the attribute actually holds a
    ``ChatGoogleGenerativeAI`` instance.
    """

    _client: ChatGoogleGenerativeAI

    def __init__(self, temperature: Optional[float] = 0.2, max_tokens: Optional[int] = 1500, streaming: Optional[bool] = False):
        self._client = ChatGoogleGenerativeAI(
            model="gemini-1.5-flash",
            temperature=temperature,
            streaming=streaming,
            max_tokens=max_tokens,
            google_api_key=GEMINI_API_KEY,
        )

    def get_client(self) -> ChatGoogleGenerativeAI:
        """Return the underlying LangChain Gemini chat model."""
        return self._client

    def get_tools(self, tools: List[Any]):
        """Convert tools into Gemini function declarations."""
        return [convert_to_genai_function_declarations(tool) for tool in tools]

    def parse_content(self, content: List[MessageContent]):
        """Rewrite content blocks into the form Gemini accepts.

        Fix: removed a leftover debug ``print`` of every message payload.
        """
        return [parse_gemini_input(message=message) for message in content]
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,32 @@ | ||
from typing import Any, List, Optional | ||
from langchain_openai import ChatOpenAI | ||
from langchain_core.utils.function_calling import convert_to_openai_tool | ||
|
||
from agent.llm.base import BaseLLMClient | ||
from petercat_utils.data_class import MessageContent | ||
from petercat_utils.utils.env import get_env_variable | ||
|
||
|
||
OPEN_API_KEY = get_env_variable("OPENAI_API_KEY") | ||
|
||
class OpenAIClient(BaseLLMClient):
    """LLM client wrapper backed by OpenAI (``gpt-4o``)."""

    _client: ChatOpenAI

    def __init__(self, temperature: Optional[float] = 0.2, max_tokens: Optional[int] = 1500, streaming: Optional[bool] = False):
        self._client = ChatOpenAI(
            model_name="gpt-4o",
            temperature=temperature,
            streaming=streaming,
            max_tokens=max_tokens,
            openai_api_key=OPEN_API_KEY,
        )

    def get_client(self) -> ChatOpenAI:
        """Return the underlying LangChain OpenAI chat model."""
        return self._client

    def get_tools(self, tools: List[Any]):
        """Convert tools into the OpenAI function-calling schema."""
        return [convert_to_openai_tool(tool) for tool in tools]

    def parse_content(self, content: List[MessageContent]):
        """Pass content blocks through unchanged — OpenAI accepts them as-is.

        Fix: removed a leftover debug ``print`` of every message payload.
        """
        return content
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,58 +1,44 @@ | ||
from typing import AsyncIterator, Optional | ||
from petercat_utils import get_client | ||
from petercat_utils.data_class import ChatData | ||
|
||
from agent.base import AgentBuilder | ||
from agent.llm import get_llm | ||
from dao.botDAO import BotDAO | ||
from models.bot import Bot | ||
from prompts.bot_template import generate_prompt_by_repo_name | ||
from petercat_utils.data_class import ChatData | ||
|
||
from tools import issue, sourcecode, knowledge, git_info | ||
|
||
|
||
def get_tools(bot_id: str, token: Optional[str]): | ||
def get_tools(bot: Bot, token: Optional[str]): | ||
issue_tools = issue.factory(access_token=token) | ||
return { | ||
"search_knowledge": knowledge.factory(bot_id=bot_id), | ||
"search_knowledge": knowledge.factory(bot_id=bot.id), | ||
"create_issue": issue_tools["create_issue"], | ||
"get_issues": issue_tools["get_issues"], | ||
"search_issues": issue_tools["search_issues"], | ||
"search_code": sourcecode.search_code, | ||
"search_repo": git_info.search_repo, | ||
} | ||
|
||
|
||
def init_prompt(input_data: ChatData): | ||
if input_data.prompt: | ||
prompt = input_data.prompt | ||
elif input_data.bot_id: | ||
try: | ||
supabase = get_client() | ||
res = ( | ||
supabase.table("bots") | ||
.select("prompt") | ||
.eq("id", input_data.bot_id) | ||
.execute() | ||
) | ||
prompt = res.data[0]["prompt"] | ||
except Exception as e: | ||
print(e) | ||
prompt = generate_prompt_by_repo_name("ant-design") | ||
else: | ||
prompt = generate_prompt_by_repo_name("ant-design") | ||
|
||
return prompt | ||
|
||
|
||
def agent_stream_chat(input_data: ChatData, user_token: str) -> AsyncIterator[str]: | ||
bot_dao = BotDAO() | ||
bot = bot_dao.get_bot(input_data.bot_id) | ||
|
||
agent = AgentBuilder( | ||
prompt=init_prompt(input_data), | ||
tools=get_tools(bot_id=input_data.bot_id, token=user_token), | ||
streaming=True, | ||
chat_model=get_llm(bot.llm), | ||
prompt=bot.prompt or generate_prompt_by_repo_name("ant-design"), | ||
tools=get_tools(bot=bot, token=user_token), | ||
) | ||
return agent.run_stream_chat(input_data) | ||
|
||
|
||
def agent_chat(input_data: ChatData, user_token: Optional[str]) -> AsyncIterator[str]: | ||
def agent_chat(input_data: ChatData, user_token: Optional[str], llm: Optional[str] = "openai") -> AsyncIterator[str]: | ||
bot_dao = BotDAO() | ||
bot = bot_dao.get_bot(input_data.bot_id) | ||
|
||
agent = AgentBuilder( | ||
prompt=init_prompt(input_data), | ||
tools=get_tools(input_data.bot_id, token=user_token), | ||
chat_model=get_llm(bot.llm), | ||
prompt=bot.prompt or generate_prompt_by_repo_name("ant-design"), | ||
tools=get_tools(bot, token=user_token), | ||
) | ||
return agent.run_chat(input_data) |
Oops, something went wrong.