From 62e21dc481599fbb4e883922acdd61e795eace4e Mon Sep 17 00:00:00 2001
From: Deep Nayak
Date: Fri, 20 Sep 2024 00:28:54 +0530
Subject: [PATCH] Minor Improvements

---
 INSTALL.md                       |  2 +-
 src/agent/test_query_pipeline.py |  2 +-
 src/app.py                       |  2 +-
 ui/src/app/[id]/page.tsx         |  4 ++--
 ui/src/app/page.tsx              |  6 +++---
 ui/src/lib/model-helper.ts       | 16 ++++++++--------
 6 files changed, 16 insertions(+), 16 deletions(-)

diff --git a/INSTALL.md b/INSTALL.md
index 99dd26c..753086b 100644
--- a/INSTALL.md
+++ b/INSTALL.md
@@ -51,7 +51,7 @@ curl -fsSL https://ollama.com/install.sh | sh
 Run the Ollama application:
 
 ```bash
-ollama run lama3.1:8b-instruct-q4_0
+ollama run llama3.1:8b-instruct-q4_0
 ```
 
 ## Step 7: Start the Flask Application
diff --git a/src/agent/test_query_pipeline.py b/src/agent/test_query_pipeline.py
index 20cbad4..88e987d 100644
--- a/src/agent/test_query_pipeline.py
+++ b/src/agent/test_query_pipeline.py
@@ -20,7 +20,7 @@
 load_dotenv()
 
 Settings.llm = Ollama(
-    model=os.getenv("OLLAMA_MODEL_NAME", "lllama3.1:8b-instruct-q4_0"),
+    model=os.getenv("OLLAMA_MODEL_NAME", "llama3.1:8b-instruct-q4_0"),
     base_url=os.getenv("OLLAMA_HOST_URL", "http://127.0.0.1:11434"),
     request_timeout=36000.0,
 )
diff --git a/src/app.py b/src/app.py
index 3375dab..d885e42 100644
--- a/src/app.py
+++ b/src/app.py
@@ -20,7 +20,7 @@
 from agent.query_pipeline import qp
 
 Settings.llm = Ollama(
-    model=os.getenv("OLLAMA_MODEL_NAME", "lllama3.1:8b-instruct-q4_0"),
+    model=os.getenv("OLLAMA_MODEL_NAME", "llama3.1:8b-instruct-q4_0"),
     base_url=os.getenv("OLLAMA_HOST_URL", "http://127.0.0.1:11434"),
     request_timeout=36000.0,
 )
diff --git a/ui/src/app/[id]/page.tsx b/ui/src/app/[id]/page.tsx
index 7c1ca35..7f65cf3 100644
--- a/ui/src/app/[id]/page.tsx
+++ b/ui/src/app/[id]/page.tsx
@@ -147,9 +147,9 @@ export default function Page({ params }: { params: { id: string } }) {
           // break;
         } else if (currentState['done']) {
-          addMessage({ role: "assistant", content: 'GoaT Link!', id: chatId });
-          addMessage({ role: "assistant", content: "Webpage Details:\nHere is the JSON curated after parsing the above webpage:\n\n" + currentState['api_response'], id: chatId });
           addMessage({ role: "assistant", content: currentState['markdown'], id: chatId });
+          addMessage({ role: "assistant", content: 'GoaT Link!', id: chatId });
+          addMessage({ role: "user", content: "Webpage Details:\nHere is the JSON curated after parsing the above webpage:\n\n" + currentState['api_response'], id: chatId });
           setMessages([...messages]);
           setLoadingSubmit(false);
           break;
diff --git a/ui/src/app/page.tsx b/ui/src/app/page.tsx
index e721bd4..c5f37ee 100644
--- a/ui/src/app/page.tsx
+++ b/ui/src/app/page.tsx
@@ -123,9 +123,9 @@ export default function Home() {
           // break;
         } else if (currentState['done']) {
-          addMessage({ role: "assistant", content: 'GoaT Link!', id: chatId });
-          addMessage({ role: "assistant", content: "Webpage Details:\nHere is the JSON curated after parsing the above webpage:\n\n" + JSON.stringify(currentState['api_response']), id: chatId });
           addMessage({ role: "assistant", content: currentState['markdown'], id: chatId });
+          addMessage({ role: "assistant", content: 'GoaT Link!', id: chatId });
+          addMessage({ role: "user", content: "Webpage Details:\nHere is the JSON curated after parsing the above webpage:\n\n" + JSON.stringify(currentState['api_response']), id: chatId });
           setMessages([...messages]);
           setLoadingSubmit(false);
           break;
@@ -249,7 +249,7 @@ export default function Home() {
             />
-            Welcome to Ollama!
+            Welcome to Goat NLP!
             Enter your name to get started.
             This is just to personalize your experience.
diff --git a/ui/src/lib/model-helper.ts b/ui/src/lib/model-helper.ts
index c1b9250..89c0698 100644
--- a/ui/src/lib/model-helper.ts
+++ b/ui/src/lib/model-helper.ts
@@ -1,9 +1,9 @@
 export function getSelectedModel(): string {
-    if (typeof window !== 'undefined') {
-      const storedModel = localStorage.getItem('selectedModel');
-      return storedModel || 'gemma:2b';
-    } else {
-      // Default model
-      return 'gemma:2b';
-    }
-  }
\ No newline at end of file
+  if (typeof window !== 'undefined') {
+    const storedModel = localStorage.getItem('selectedModel');
+    return storedModel || 'llama3.1:8b-instruct-q4_0';
+  } else {
+    // Default model
+    return 'llama3.1:8b-instruct-q4_0';
+  }
+}
\ No newline at end of file
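
Both Python hunks above rely on the same environment-variable fallback: the Ollama model name is read from `OLLAMA_MODEL_NAME` and only falls back to the hard-coded `llama3.1:8b-instruct-q4_0` default when the variable is unset. A minimal sketch of that lookup is below; the `resolve_model` helper name is illustrative and not part of the repository.

```python
import os

# Illustrative helper mirroring the os.getenv fallback used in src/app.py and
# src/agent/test_query_pipeline.py: an exported OLLAMA_MODEL_NAME wins,
# otherwise the patched default model name is returned.
def resolve_model() -> str:
    return os.getenv("OLLAMA_MODEL_NAME", "llama3.1:8b-instruct-q4_0")

if __name__ == "__main__":
    print(resolve_model())  # prints the default unless OLLAMA_MODEL_NAME is set
```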