diff --git a/lumen/ai/agents.py b/lumen/ai/agents.py
index e9c4b5c0b..3d46e59a1 100644
--- a/lumen/ai/agents.py
+++ b/lumen/ai/agents.py
@@ -26,6 +26,7 @@
 from ..transforms.sql import SQLOverride, SQLTransform, Transform
 from ..views import VegaLiteView, View, hvPlotUIView
 from .analysis import Analysis
+from .config import FUZZY_TABLE_LENGTH
 from .embeddings import Embeddings
 from .llm import Llm
 from .memory import memory
@@ -39,8 +40,6 @@
 )
 from .views import AnalysisOutput, LumenOutput, SQLOutput
 
-FUZZY_TABLE_LENGTH = 10
-
 
 class Agent(Viewer):
     """
diff --git a/lumen/ai/assistant.py b/lumen/ai/assistant.py
index 6f9b1955d..ecd562bf1 100644
--- a/lumen/ai/assistant.py
+++ b/lumen/ai/assistant.py
@@ -19,6 +19,7 @@
 from .agents import (
     Agent, AnalysisAgent, ChatAgent, SQLAgent,
 )
+from .config import DEMO_MESSAGES, GETTING_STARTED_SUGGESTIONS
 from .export import export_notebook
 from .llm import Llama, Llm
 from .logs import ChatLogs
@@ -26,22 +27,6 @@
 from .models import Validity
 from .utils import get_schema, render_template, retry_llm_output
 
-GETTING_STARTED_SUGGESTIONS = [
-    "What datasets do you have?",
-    "Tell me about the dataset.",
-    "Create a plot of the dataset.",
-    "Find the min and max of the values.",
-]
-
-DEMO_MESSAGES = [
-    "What data is available?",
-    "Can I see the first one?",
-    "Tell me about the dataset.",
-    "What could be interesting to analyze?",
-    "Perform a SQL query on one of these.",
-    "Show it to me as a scatter plot."
-]
-
 
 class Assistant(Viewer):
     """
diff --git a/lumen/ai/config.py b/lumen/ai/config.py
new file mode 100644
index 000000000..f04ddef05
--- /dev/null
+++ b/lumen/ai/config.py
@@ -0,0 +1,40 @@
+from pathlib import Path
+
+import panel as pn
+
+
+class LlmSetupError(Exception):
+    """
+    Raised when an error occurs during the setup of the LLM.
+    """
+
+
+THIS_DIR = Path(__file__).parent
+
+FUZZY_TABLE_LENGTH = 10
+
+GETTING_STARTED_SUGGESTIONS = [
+    "What datasets do you have?",
+    "Tell me about the dataset.",
+    "Create a plot of the dataset.",
+    "Find the min and max of the values.",
+]
+
+DEMO_MESSAGES = [
+    "What data is available?",
+    "Can I see the first one?",
+    "Tell me about the dataset.",
+    "What could be interesting to analyze?",
+    "Perform a SQL query on one of these.",
+    "Show it to me as a scatter plot.",
+]
+
+DEFAULT_EMBEDDINGS_PATH = Path("embeddings")
+
+UNRECOVERABLE_ERRORS = (
+    ImportError,
+    LlmSetupError,
+    RecursionError,
+)
+
+pn.chat.ChatStep.min_width = 350
diff --git a/lumen/ai/embeddings.py b/lumen/ai/embeddings.py
index 8e549335b..0a527d71f 100644
--- a/lumen/ai/embeddings.py
+++ b/lumen/ai/embeddings.py
@@ -1,6 +1,6 @@
 from pathlib import Path
 
-DEFAULT_PATH = Path("embeddings")
+from .config import DEFAULT_EMBEDDINGS_PATH
 
 
 class Embeddings:
@@ -14,7 +14,7 @@ def query(self, query_texts: str) -> list:
 
 
 class ChromaDb(Embeddings):
-    def __init__(self, collection: str, persist_dir: str = DEFAULT_PATH):
+    def __init__(self, collection: str, persist_dir: str = DEFAULT_EMBEDDINGS_PATH):
         import chromadb
         self.client = chromadb.PersistentClient(path=str(persist_dir / collection))
         self.collection = self.client.get_or_create_collection(collection)
diff --git a/lumen/ai/utils.py b/lumen/ai/utils.py
index 8b2e98c78..4a50db673 100644
--- a/lumen/ai/utils.py
+++ b/lumen/ai/utils.py
@@ -11,21 +11,7 @@
 from lumen.pipeline import Pipeline
 from lumen.sources.base import Source
 
-THIS_DIR = Path(__file__).parent
-
-
-class LlmSetupError(Exception):
-    """
-    Raised when an error occurs during the setup of the LLM.
-    """
-
-
-
-UNRECOVERABLE_ERRORS = (
-    ImportError,
-    LlmSetupError,
-    RecursionError,
-)
+from .config import THIS_DIR, UNRECOVERABLE_ERRORS
 
 
 def render_template(template, **context):