Research agent directory in examples
whimo committed Apr 26, 2024
1 parent 43cd69c commit bb691e8
Showing 4 changed files with 293 additions and 302 deletions.
13 changes: 7 additions & 6 deletions motleycrew/tool/llm_tool.py
@@ -18,27 +18,28 @@ def __init__(
         description: str,
         prompt: str | BasePromptTemplate,
         llm: Optional[BaseLanguageModel] = None,
+        input_description: Optional[str] = "Input for the tool.",
     ):
-        langchain_tool = create_llm_langchain_tool(name, description, prompt, llm)
+        langchain_tool = create_llm_langchain_tool(
+            name=name, description=description, prompt=prompt, llm=llm, input_description=input_description
+        )
         super().__init__(langchain_tool)
 
 
 def create_llm_langchain_tool(
     name: str,
     description: str,
     prompt: str | BasePromptTemplate,
-    llm: Optional[BaseLanguageModel] = None,
-    input_description: Optional[str] = "Input for the tool.",
+    llm: Optional[BaseLanguageModel],
+    input_description: Optional[str],
 ):
     if llm is None:
         llm = init_llm(llm_framework=LLMFramework.LANGCHAIN)
 
     if not isinstance(prompt, BasePromptTemplate):
         prompt = PromptTemplate.from_template(prompt)
 
-    assert (
-        len(prompt.input_variables) == 1
-    ), "Prompt must contain exactly one input variable"
+    assert len(prompt.input_variables) == 1, "Prompt must contain exactly one input variable"
     input_var = prompt.input_variables[0]
 
     class LLMToolInput(BaseModel):
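For reference, a minimal usage sketch of the constructor this diff touches, assuming the class defined in llm_tool.py is named LLMTool and is importable from motleycrew.tool.llm_tool; the tool name, description, and prompt below are illustrative and not part of the commit:

# Hypothetical usage of the new input_description parameter (illustration only).
from motleycrew.tool.llm_tool import LLMTool  # assumed import path, matching the file above

summarize_tool = LLMTool(
    name="summarizer",
    description="Summarizes a piece of text in one short paragraph.",
    # The prompt must contain exactly one input variable, per the assert in the diff.
    prompt="Summarize the following text in one short paragraph:\n\n{text}",
    input_description="The text to summarize.",  # new parameter added in this commit
    # llm is omitted: per the diff, a default is created via
    # init_llm(llm_framework=LLMFramework.LANGCHAIN).
)

Note the design choice visible in the diff: create_llm_langchain_tool no longer defaults llm and input_description, so the defaults live only on the wrapper's __init__, and the helper is now expected to be called with both arguments passed explicitly as keywords.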
