# app.py
from langchain_experimental.sql import SQLDatabaseChain
from langchain.sql_database import SQLDatabase
from langchain.chat_models import ChatOpenAI
from langchain.prompts.prompt import PromptTemplate
from langchain.callbacks import get_openai_callback
from dataclasses import dataclass
from typing import Literal
import streamlit as st
import os
import openai
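# The imports above assume the pre-0.1 LangChain package layout (langchain.chat_models,
# langchain.sql_database) plus langchain-experimental, openai, and streamlit being installed.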
# Setting Menu options and web page configurations
st.set_page_config(
    page_title="IPL 2023 StatsBot",
    page_icon="🏏",
    initial_sidebar_state="expanded",
    menu_items={
        'Get Help': 'https://www.linkedin.com/in/vinodvidhole/',
        'About': "**IPL 2023 StatsBot** powered by Azure Open AI, Python, LangChain, SQL, and Streamlit"
    }
)
# Adding Sidebar
st.sidebar.title("IPL 2023 Statsbot 🏏")
st.sidebar.markdown("Powered by Azure Open AI, Python, LangChain, SQL, and Streamlit")
st.sidebar.markdown("**Author:** [Vinod Dhole](https://www.linkedin.com/in/vinodvidhole/)")
st.sidebar.markdown("**Source Code:** [GitHub](https://github.com/vinodvidhole/IPLStatsBot)")
# About the App
st.sidebar.subheader("About the App")
st.sidebar.markdown(
    "The **IPL Stats Bot** is an advanced chatbot that offers immediate access to custom relational SQL data. "
    "It is built with Azure OpenAI, Python, LangChain, and Streamlit."
)
# Overcoming Limitations
st.sidebar.subheader("Overcoming Limitations")
st.sidebar.markdown(
    "Conventional AI models, including large language models (LLMs), often fall short when asked about "
    "the latest events or custom data. This application addresses that limitation by giving OpenAI models "
    "access to IPL data in SQL format through LangChain, so they can answer specific questions accurately "
    "without requiring prior knowledge of the database structure."
)
# Natural Language to SQL
st.sidebar.subheader("Natural Language to SQL")
st.sidebar.markdown(
    "Additionally, the application showcases Natural Language to SQL conversion, eliminating the need for SQL expertise "
    "to gain data insights from relational databases."
)
# How to Use
st.sidebar.subheader("How to Use")
st.sidebar.markdown(
    "To engage with this chatbot, simply ask IPL-related questions about IPL 2023 stats, such as the winner, "
    "top run-scorer, leading wicket-taker, or the player with the most Man of the Match awards."
)
@dataclass
class Message:
    """Class for keeping track of a chat message."""
    origin: Literal["human", "ai"]
    message: str
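# The Azure OpenAI and SQLite settings below are read from Streamlit secrets.
# A minimal .streamlit/secrets.toml sketch (placeholder values, not taken from the repo):
#
#   openai_api_key = "<your-azure-openai-key>"
#   openai_api_endpoint = "https://<resource-name>.openai.azure.com/"
#   openai_api_version = "2023-05-15"            # example version string
#   openai_api_type = "azure"
#   openai_model_name = "gpt-35-turbo"           # example model name
#   openai_deployment_name = "<your-deployment-name>"
#   sqllite_db_name = "ipl_2023.db"              # example database file name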
os.environ["OPENAI_API_KEY"] = st.secrets["openai_api_key"]
openai.api_base = st.secrets["openai_api_endpoint"]
openai.api_version = st.secrets["openai_api_version"]
openai.api_type = st.secrets["openai_api_type"]
openai.api_key = os.getenv("OPENAI_API_KEY")
model_name = st.secrets["openai_model_name"]
deployment_name = st.secrets["openai_deployment_name"]
db_file_name = st.secrets["sqllite_db_name"]
# Global flag variable to track if session state has been initialized
session_state_initialized = False
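# load_css() injects static/styles.css into the page; the chat-row / chat-bubble / chat-icon
# classes used in the HTML rendered further below are assumed to be defined in that stylesheet.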
def load_css():
    with open("static/styles.css", "r") as f:
        css = f"<style>{f.read()}</style>"
    st.markdown(css, unsafe_allow_html=True)
def initialize_session_state():
    global session_state_initialized
    if not session_state_initialized:
        if "history" not in st.session_state:
            st.session_state.history = []
        if "token_count" not in st.session_state:
            st.session_state.token_count = 0
        if "conversation" not in st.session_state:
            # Create the Azure OpenAI chat client once and cache it in the session state
            llm = ChatOpenAI(temperature=0,
                             model_name=model_name,
                             model_kwargs={"engine": deployment_name},
                             verbose=True)
            st.session_state.conversation = llm
        session_state_initialized = True
## for future implementation
def collect_feedback(user_query, model_response, user_rating):
    # Save the user's query, model response, and user rating to a feedback database or file
    with open("feedback.csv", "a") as feedback_file:
        feedback_file.write(f"{user_query},{model_response},{user_rating}\n")
def on_click_callback():
    with get_openai_callback() as cb:
        human_prompt = st.session_state.human_prompt
        try:
            llm_response, _ = answer_question(human_prompt)
            ##user_rating = st.selectbox("Rate the response:", ["Excellent", "Good", "Fair", "Poor"])
            ##collect_feedback(human_prompt, llm_response, user_rating)
        except Exception as e:
            llm_response = f"An error occurred: {str(e)}"
        st.session_state.token_count += cb.total_tokens
DEFAULT_TABLES = [
    'each_match_records',
    'each_ball_records'
]
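# Only these two tables are exposed to the LLM: get_db() passes DEFAULT_TABLES as
# include_tables, so the {table_info} placeholder in the prompt stays limited to them.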
def get_prompt():
    _DEFAULT_TEMPLATE = """Given an input question, first create a syntactically correct {dialect} query to run, then look at the results of the query and return the answer.
Use the following format:
Question: "Question here"
SQLQuery: "SQL Query to run"
SQLResult: "Result of the SQLQuery"
Answer: ""
Only use the following tables:
{table_info}
Some examples of SQL queries that correspond to questions are:
Question: "Who won the final match of IPL 2023"
SQLQuery: "SELECT winner FROM each_match_records WHERE match_type = 'Final' ORDER BY date DESC LIMIT 1"
SQLResult: "[('Chennai Super Kings',)]"
Answer: "Chennai Super Kings"
Question: "Who is the top run-scorer in IPL 2023"
SQLQuery: "SELECT batter, SUM(score) AS total_runs FROM each_ball_records GROUP BY batter ORDER BY total_runs DESC"
SQLResult: "[('Shubman Gill', 916)]"
Answer: "Shubman Gill is the top run-scorer in IPL 2023 with a total of 916 runs."
Question: "Who took most wickets"
SQLQuery: "SLECT bowler, COUNT(*) AS wickets FROM each_ball_records WHERE outcome = 'out' GROUP BY bowler ORDER BY wickets DESC LIMIT 1 "
SQLResult: "[('Mohammed Shami', )]"
Answer: "The bowler who took the most wickets is Mohammed Shami."
Question: {input}"""
    PROMPT = PromptTemplate(
        input_variables=["input", "table_info", "dialect"], template=_DEFAULT_TEMPLATE
    )
    return PROMPT
def get_db():
    db = SQLDatabase.from_uri("sqlite:///{}".format(db_file_name),
                              include_tables=DEFAULT_TABLES,
                              sample_rows_in_table_info=2)
    return db
def answer_question(query):
    PROMPT = get_prompt()
    db = get_db()
    # Initialize the session state if it hasn't been done
    initialize_session_state()
    llm = st.session_state.conversation
    try:
        db_chain = SQLDatabaseChain.from_llm(llm, db,
                                             prompt=PROMPT,
                                             verbose=True,
                                             return_intermediate_steps=True,
                                             use_query_checker=True
                                             )
        result = db_chain(query)
        llm_answer = result['result']
        sql_cmd = None
        for step in result['intermediate_steps']:
            if 'sql_cmd' in step:
                sql_cmd = step['sql_cmd']
                break
        #final_op = "{}\nSQL Command: < {} >".format(llm_answer, sql_cmd)
        # Append the user query and the model's answer to the conversation history
        st.session_state.history.append(Message("human", query))
        st.session_state.history.append(Message("ai", llm_answer))
        st.session_state.history.append(Message("ai", "SQL Command < {} >".format(sql_cmd)))
        return llm_answer, sql_cmd
    except Exception as e:
        # Append the user query and the error to the conversation history
        st.session_state.history.append(Message("human", query))
        st.session_state.history.append(Message("ai", "An error occurred < {} >".format(str(e))))
        # Return a (message, sql_cmd) pair so the caller's tuple unpacking still works
        return f"An error occurred: {str(e)}", None
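# Illustrative call (outside the Streamlit callback flow):
#   answer, sql = answer_question("Who won the final match of IPL 2023")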
load_css()
initialize_session_state()
# Add the logo
path = os.path.dirname(__file__)
# Define the logo path
logo_path = os.path.join(path, "static", "ipl.png")
#user_icon_path = os.path.join(path, "static", "user_icon.png")
#ai_icon_path = os.path.join(path, "static", "ipl.png")
st.image(logo_path, width=200)
st.title("IPL 2023 Statsbot 🏏")
st.markdown("AI chatbot for IPL statistics and data analysis powered by Azure Open AI, Python, LangChain, SQL, and Streamlit")
if __name__ == "__main__":
    chat_placeholder = st.container()
    prompt_placeholder = st.form("chat-form")
    tokens_placeholder = st.empty()

    with chat_placeholder:
        for chat in st.session_state.history:
            #icon_path = ai_icon_path if chat.origin == 'ai' else user_icon_path
            icon_url = "https://i.imgur.com/oodRG9v.png" if chat.origin == 'ai' else "https://i.imgur.com/VLL4hVH.png"
            div = f"""
<div class="chat-row {'' if chat.origin == 'ai' else 'row-reverse'}">
    <img class="chat-icon" src="{icon_url}" width=32 height=32>
    <div class="chat-bubble {'ai-bubble' if chat.origin == 'ai' else 'human-bubble'}">
        &#8203;{chat.message}
    </div>
</div>
"""
            st.markdown(div, unsafe_allow_html=True)
        for _ in range(3):
            st.markdown("")

    with prompt_placeholder:
        st.markdown("**Chat**")
        cols = st.columns((6, 1))
        cols[0].text_input(
            "Chat",
            value="Who won the final match of IPL 2023",
            label_visibility="collapsed",
            key="human_prompt",
        )
        cols[1].form_submit_button(
            "Submit",
            type="primary",
            on_click=on_click_callback,
        )

    tokens_placeholder.caption("Used {} tokens".format(st.session_state.token_count))