Skip to content

Commit

Permalink
added streamlit app py file
Browse files Browse the repository at this point in the history
  • Loading branch information
bhattbhavesh91 authored Oct 31, 2023
1 parent e22f93e commit 3c61060
Showing 1 changed file with 109 additions and 0 deletions.
109 changes: 109 additions & 0 deletions llama2-pdf-q-a-streamlit-app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
import json
import os
import streamlit as st
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster
from llama_index import ServiceContext
from llama_index import set_global_service_context
from llama_index import VectorStoreIndex, SimpleDirectoryReader, StorageContext
from llama_index.embeddings import GradientEmbedding
from llama_index.llms import GradientBaseModelLLM
from llama_index.vector_stores import CassandraVectorStore
from copy import deepcopy
from tempfile import NamedTemporaryFile

@st.cache_resource
def create_datastax_connection():
    """Open and return a Cassandra session against Astra DB.

    Credentials are loaded from the local token JSON file and paired with
    the secure-connect bundle. The result is cached by Streamlit
    (``st.cache_resource``) so the cluster connection is established only
    once per app process, not on every rerun.
    """
    bundle = {'secure_connect_bundle': 'secure-connect-bhavesh-astra-test.zip'}

    with open("bhavesh_astra_test-token.json") as token_file:
        credentials = json.load(token_file)

    provider = PlainTextAuthProvider(credentials["clientId"],
                                     credentials["secret"])
    cluster = Cluster(cloud=bundle, auth_provider=provider)
    return cluster.connect()

def main():
    """Streamlit entry point: chat with an uploaded PDF via Llama2.

    Flow: connect to Astra DB (cached), configure the Gradient LLM and
    embedding model as the global llama_index service context, let the
    user upload and index a PDF from the sidebar, then answer chat
    questions against the indexed document.
    """
    st.set_page_config(page_title="Chat with your PDF using Llama2 & Llama Index",
                       page_icon="🦙")
    st.header('🦙 Chat with your PDF using Llama2 model & Llama Index')

    # Initialise per-session state on first run.
    if "conversation" not in st.session_state:
        st.session_state.conversation = None

    if "activate_chat" not in st.session_state:
        st.session_state.activate_chat = False

    if "messages" not in st.session_state:
        st.session_state.messages = []

    # Replay the accumulated chat history on every Streamlit rerun.
    for message in st.session_state.messages:
        with st.chat_message(message["role"], avatar=message['avatar']):
            st.markdown(message["content"])

    # Establish (cached) Astra DB connection; kept for side effect even
    # though the session object is not referenced below.
    session = create_datastax_connection()

    # BUG FIX: the original unconditionally overwrote any real credentials
    # exported in the environment with placeholder strings. setdefault
    # keeps configured values and only fills in placeholders when unset.
    os.environ.setdefault('GRADIENT_ACCESS_TOKEN', "Enter your Token")
    os.environ.setdefault('GRADIENT_WORKSPACE_ID', "Enter your Workspace ID")

    llm = GradientBaseModelLLM(base_model_slug="llama2-7b-chat",
                               max_tokens=400)

    embed_model = GradientEmbedding(
        gradient_access_token=os.environ["GRADIENT_ACCESS_TOKEN"],
        gradient_workspace_id=os.environ["GRADIENT_WORKSPACE_ID"],
        gradient_model_slug="bge-large")

    service_context = ServiceContext.from_defaults(
        llm=llm,
        embed_model=embed_model,
        chunk_size=256)

    set_global_service_context(service_context)

    with st.sidebar:
        st.subheader('Upload Your PDF File')
        docs = st.file_uploader('⬆️ Upload your PDF & Click to process',
                                accept_multiple_files=False,
                                type=['pdf'])
        if st.button('Process'):
            if docs is None:
                # Guard: the button can be pressed before a file is chosen;
                # the original crashed on docs.getbuffer() in that case.
                st.warning('Please upload a PDF file first.')
            else:
                with NamedTemporaryFile(dir='.', suffix='.pdf') as f:
                    f.write(docs.getbuffer())
                    with st.spinner('Processing'):
                        documents = SimpleDirectoryReader(".").load_data()
                        index = VectorStoreIndex.from_documents(
                            documents, service_context=service_context)
                        # BUG FIX: always refresh the engine so processing a
                        # new PDF replaces the stale index — the original
                        # only stored the first engine and kept it forever.
                        st.session_state.query_engine = index.as_query_engine()
                        st.session_state.activate_chat = True

    if st.session_state.activate_chat:
        if prompt := st.chat_input("Ask your question from the PDF?"):
            with st.chat_message("user", avatar='👨🏻'):
                st.markdown(prompt)
            st.session_state.messages.append({"role": "user",
                                              "avatar": '👨🏻',
                                              "content": prompt})

            pdf_response = st.session_state.query_engine.query(prompt)
            cleaned_response = pdf_response.response
            with st.chat_message("assistant", avatar='🤖'):
                st.markdown(cleaned_response)
            st.session_state.messages.append({"role": "assistant",
                                              "avatar": '🤖',
                                              "content": cleaned_response})
    else:
        st.markdown(
            'Upload your PDFs to chat'
            )


# Script entry point: launch the Streamlit app when run directly
# (e.g. `streamlit run llama2-pdf-q-a-streamlit-app.py`).
if __name__ == '__main__':
    main()

0 comments on commit 3c61060

Please sign in to comment.