Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

refactor(v2)!: update langchain #968

Merged
merged 2 commits on Nov 14, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion core/cat/factory/custom_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

from langchain_core.language_models.llms import LLM
from langchain_openai.chat_models import ChatOpenAI
from langchain_community.chat_models.ollama import ChatOllama
from langchain_ollama import ChatOllama



Expand Down
22 changes: 11 additions & 11 deletions core/cat/looking_glass/cheshire_cat.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@
from langchain_core.runnables import RunnableLambda
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers.string import StrOutputParser
from langchain_community.llms import Cohere
from langchain_openai import ChatOpenAI, OpenAI
from langchain_google_genai import ChatGoogleGenerativeAI
from langchain_cohere import ChatCohere

from cat.factory.auth_handler import get_auth_handler_from_name
from cat.factory.custom_auth_handler import CoreAuthHandler
Expand All @@ -29,6 +29,7 @@
from cat.utils import singleton
from cat import utils


class Procedure(Protocol):
name: str
procedure_type: str # "tool" or "form"
Expand Down Expand Up @@ -204,7 +205,7 @@ def load_language_embedder(self) -> embedders.EmbedderSettings:
# For Azure avoid automatic embedder selection

# Cohere
elif type(self._llm) in [Cohere]:
elif type(self._llm) in [ChatCohere]:
embedder = embedders.EmbedderCohereConfig.get_embedder_from_config(
{
"cohere_api_key": self._llm.cohere_api_key,
Expand All @@ -231,7 +232,6 @@ def load_language_embedder(self) -> embedders.EmbedderSettings:
return embedder

def load_auth(self):

# Custom auth_handler # TODOAUTH: change the name to custom_auth
selected_auth_handler = crud.get_setting_by_name(name="auth_handler_selected")

Expand Down Expand Up @@ -407,22 +407,22 @@ def llm(self, prompt, *args, **kwargs) -> str:
caller = utils.get_caller_info()

# here we deal with motherfucking langchain
prompt = ChatPromptTemplate(
messages=[
SystemMessage(content=prompt)
]
)
prompt = ChatPromptTemplate(messages=[SystemMessage(content=prompt)])

chain = (
prompt
| RunnableLambda(lambda x: utils.langchain_log_prompt(x, f"{caller} prompt"))
| RunnableLambda(
lambda x: utils.langchain_log_prompt(x, f"{caller} prompt")
)
| self._llm
| RunnableLambda(lambda x: utils.langchain_log_output(x, f"{caller} prompt output"))
| RunnableLambda(
lambda x: utils.langchain_log_output(x, f"{caller} prompt output")
)
| StrOutputParser()
)

output = chain.invoke(
{}, # in case we need to pass info to the template
{}, # in case we need to pass info to the template
)

return output
63 changes: 32 additions & 31 deletions core/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,44 +9,45 @@ authors = [
{ name = "Cheshire Cat AI Contributors" },
]
dependencies = [
"gunicorn==20.1.0",
"uvicorn[standard]==0.20.0",
"python-multipart==0.0.6",
"pydantic==2.4.2",
"fastapi==0.110.2",
"scalar-fastapi==1.0.2",
"websockets==10.4",
"gunicorn==23.0.0",
"uvicorn[standard]==0.32.0",
"python-multipart==0.0.17",
"pydantic==2.9.2",
"fastapi==0.115.4",
"scalar-fastapi==1.0.3",
"websockets==13.1",
"PyJWT==2.8.0",
"bcrypt==4.1.3",
"Jinja2==3.1.4",
"pandas==1.5.3",
"scikit-learn==1.2.1",
"qdrant_client==1.11.0",
"langchain==0.2.1",
"langchain-community==0.2.1",
"langchain-openai==0.1.7",
"langchain-anthropic==0.1.23",
"langchain-google-genai==1.0.8",
"langchain-cohere==0.1.5",
"huggingface-hub==0.20.3",
"pandas==2.2.3",
"scikit-learn==1.5.2",
"qdrant-client==1.12.1",
"langchain==0.3.7",
"langchain-openai==0.2.6",
"langchain-community==0.3.4",
"langchain-anthropic==0.2.4",
"langchain-google-genai==2.0.4",
"langchain-cohere==0.3.1",
"langchain-ollama==0.2.0",
"huggingface-hub==0.26.2",
"beautifulsoup4==4.12.3",
"pdfminer.six==20221105",
"unstructured==0.12.6",
"tiktoken==0.7.0",
"tomli",
"pdfminer.six==20240706",
"unstructured==0.16.4",
"tiktoken==0.8.0",
"tomli==2.0.2",
"loguru==0.7.2",
"text_generation==0.6.1",
"text_generation==0.7.0",
"tinydb==4.8.0",
"python-slugify==8.0.1",
"autopep8",
"pylint",
"perflint",
"pylint-actions",
"pytest",
"pytest-asyncio",
"httpx",
"fastembed==0.3.6",
"rapidfuzz==3.6.1",
"autopep8==2.3.1",
"pylint==3.3.1",
"perflint==0.8.1",
"pylint-actions==0.5.0",
"pytest==8.3.3",
"pytest-asyncio==0.24.0",
"httpx==0.27.2",
"fastembed==0.4.1",
"rapidfuzz==3.10.1",
"APScheduler==3.10.4",
"ruff==0.4.7",
"aiofiles==24.1.0",
Expand Down
Loading