-
Notifications
You must be signed in to change notification settings - Fork 36
/
Copy pathapp.py
206 lines (169 loc) Β· 6.88 KB
/
app.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
"""Streamlit chat app: plain LLM chat with optional RAG over uploaded docs/URLs.

Supports OpenAI and Anthropic models (user-supplied keys), or a single
Azure OpenAI deployment when AZ_OPENAI_API_KEY is set in the environment.
"""

import os
import uuid

import dotenv
import streamlit as st

# Swap the stdlib sqlite3 for pysqlite3 on POSIX systems so chromadb (used by
# rag_methods) gets a modern SQLite build — required on Streamlit Cloud, whose
# system sqlite3 is too old. Must run BEFORE the rag_methods import below.
if os.name == "posix":
    __import__("pysqlite3")
    import sys
    sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")

from langchain_openai import ChatOpenAI, AzureChatOpenAI
from langchain_anthropic import ChatAnthropic
from langchain.schema import HumanMessage, AIMessage

from rag_methods import (
    load_doc_to_db,
    load_url_to_db,
    stream_llm_response,
    stream_llm_rag_response,
)

dotenv.load_dotenv()

# Single-provider mode when an Azure OpenAI key is configured; otherwise the
# user chooses between OpenAI and Anthropic models with their own keys.
if "AZ_OPENAI_API_KEY" not in os.environ:
    MODELS = [
        # "openai/o1-mini",
        "openai/gpt-4o",
        "openai/gpt-4o-mini",
        "anthropic/claude-3-5-sonnet-20240620",
    ]
else:
    MODELS = ["azure-openai/gpt-4o"]

st.set_page_config(
    page_title="RAG LLM app?",
    # NOTE(review): the original emoji were mojibake-mangled in this copy;
    # these are plausible reconstructions — confirm against upstream.
    page_icon="📚",
    layout="centered",
    initial_sidebar_state="expanded",
)

# --- Header ---
st.html("""<h2 style="text-align: center;">📚🔍 <i> Do your LLM even RAG bro? </i> 🤖💬</h2>""")

# --- Initial Setup ---
if "session_id" not in st.session_state:
    st.session_state.session_id = str(uuid.uuid4())

if "rag_sources" not in st.session_state:
    st.session_state.rag_sources = []

if "messages" not in st.session_state:
    st.session_state.messages = [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi there! How can I assist you today?"},
    ]

# --- Side Bar LLM API Tokens ---
with st.sidebar:
    if "AZ_OPENAI_API_KEY" not in os.environ:
        # Pre-filled from env vars only for development convenience;
        # in production these default to empty and the user types a key.
        default_openai_api_key = os.getenv("OPENAI_API_KEY") or ""
        with st.popover("🔐 OpenAI"):
            openai_api_key = st.text_input(
                "Introduce your OpenAI API Key (https://platform.openai.com/)",
                value=default_openai_api_key,
                type="password",
                key="openai_api_key",
            )

        default_anthropic_api_key = os.getenv("ANTHROPIC_API_KEY") or ""
        with st.popover("🔐 Anthropic"):
            anthropic_api_key = st.text_input(
                "Introduce your Anthropic API Key (https://console.anthropic.com/)",
                value=default_anthropic_api_key,
                type="password",
                key="anthropic_api_key",
            )
    else:
        # Azure mode: per-user keys are not collected.
        openai_api_key, anthropic_api_key = None, None
        st.session_state.openai_api_key = None
        az_openai_api_key = os.getenv("AZ_OPENAI_API_KEY")
        st.session_state.az_openai_api_key = az_openai_api_key

# --- Main Content ---
# An OpenAI key counts as present only if it looks like a real key ("sk-...").
missing_openai = not openai_api_key or "sk-" not in openai_api_key
missing_anthropic = not anthropic_api_key
if missing_openai and missing_anthropic and ("AZ_OPENAI_API_KEY" not in os.environ):
    st.write("#")
    st.warning("⬅️ Please introduce an API Key to continue...")
else:
    # Sidebar
    with st.sidebar:
        st.divider()

        # Only offer models whose provider has a usable API key.
        models = [
            model
            for model in MODELS
            if ("openai" in model and not missing_openai)
            or ("anthropic" in model and not missing_anthropic)
            or "azure-openai" in model
        ]
        st.selectbox(
            "🤖 Select a Model",
            options=models,
            key="model",
        )

        cols0 = st.columns(2)
        with cols0[0]:
            is_vector_db_loaded = (
                "vector_db" in st.session_state
                and st.session_state.vector_db is not None
            )
            # RAG can only be toggled on once a vector DB has been loaded.
            st.toggle(
                "Use RAG",
                value=is_vector_db_loaded,
                key="use_rag",
                disabled=not is_vector_db_loaded,
            )

        with cols0[1]:
            st.button(
                "Clear Chat",
                on_click=lambda: st.session_state.messages.clear(),
                type="primary",
            )

        st.header("RAG Sources:")

        # File upload input for RAG with documents
        st.file_uploader(
            "📄 Upload a document",
            type=["pdf", "txt", "docx", "md"],
            accept_multiple_files=True,
            on_change=load_doc_to_db,
            key="rag_docs",
        )

        # URL input for RAG with websites
        st.text_input(
            "🌐 Introduce a URL",
            placeholder="https://example.com",
            on_change=load_url_to_db,
            key="rag_url",
        )

        with st.expander(f"📚 Documents in DB ({0 if not is_vector_db_loaded else len(st.session_state.rag_sources)})"):
            st.write([] if not is_vector_db_loaded else list(st.session_state.rag_sources))

    # Main chat app
    model_provider = st.session_state.model.split("/")[0]
    model_name = st.session_state.model.split("/")[-1]
    if model_provider == "openai":
        llm_stream = ChatOpenAI(
            api_key=openai_api_key,
            model_name=model_name,
            temperature=0.3,
            streaming=True,
        )
    elif model_provider == "anthropic":
        llm_stream = ChatAnthropic(
            api_key=anthropic_api_key,
            model=model_name,
            temperature=0.3,
            streaming=True,
        )
    elif model_provider == "azure-openai":
        llm_stream = AzureChatOpenAI(
            azure_endpoint=os.getenv("AZ_OPENAI_ENDPOINT"),
            openai_api_version="2024-02-15-preview",
            model_name=model_name,
            openai_api_key=os.getenv("AZ_OPENAI_API_KEY"),
            openai_api_type="azure",
            temperature=0.3,
            streaming=True,
        )

    # Replay the stored chat history on every rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    if prompt := st.chat_input("Your message"):
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            # Convert the dict-based history into LangChain message objects.
            messages = [
                HumanMessage(content=m["content"])
                if m["role"] == "user"
                else AIMessage(content=m["content"])
                for m in st.session_state.messages
            ]
            if not st.session_state.use_rag:
                st.write_stream(stream_llm_response(llm_stream, messages))
            else:
                st.write_stream(stream_llm_rag_response(llm_stream, messages))

    # NOTE(review): footer placement inside the else-branch is inferred —
    # the source copy had lost all indentation; confirm against upstream.
    with st.sidebar:
        st.divider()
        st.video("https://youtu.be/abMwFViFFhI")
        st.write("📖[Medium Blog](https://medium.com/@enricdomingo/program-a-rag-llm-chat-app-with-langchain-streamlit-o1-gtp-4o-and-claude-3-5-529f0f164a5e)")
        st.write("📖[GitHub Repo](https://github.com/enricd/rag_llm_app)")