import streamlit as st
- from streamlit_chat import message
- from streamlit_extras.colored_header import colored_header
from streamlit_extras.add_vertical_space import add_vertical_space
from utils import Smart_Agent, HR_PERSONA, HR_AVAILABLE_FUNCTIONS, HR_FUNCTIONS_SPEC
- import concurrent.futures
import time
import random
- import openai
import os
from pathlib import Path
import json
st.session_state['input'] = ""


-
-
- # User input
- ## Function for taking user provided prompt as input
- def clear():
-     st.session_state.input_var = st.session_state.input
-     st.session_state.input = ''
-
- def get_text():
-     st.text_input("You: ", "", key="input", on_change=clear())
-     return st.session_state.input_var
- ## Applying the user input box
- # with input_container:
- user_input = get_text()
+ user_input = st.chat_input("You:")
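For context on the new input call, here is a minimal sketch (not part of this commit) of how st.chat_input behaves: it renders a pinned chat box and returns the submitted string, or None on reruns where nothing was entered, which is why a plain truthiness check on user_input is enough to detect a new turn further down.

import streamlit as st

prompt = st.chat_input("You:")  # str when the user submits, otherwise None
if prompt:
    with st.chat_message("user"):
        st.markdown(prompt)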

## Conditional display of AI generated responses as a function of user provided prompts
history = st.session_state['history']

if len(history) > 0:
-     bot_history = [item['content'] for item in history if (item['role'] == 'assistant') and (item.get("name") is None)]
-     # trim history
-     # bot_history = bot_history[-MAX_HIST-1:]
-     user_history = [item['content'] for item in history if item['role'] == 'user']
-     # user_history = user_history[-MAX_HIST-1:]
-
-     for i in range(len(bot_history)):
-         if i > 0:
-             if len(user_history) > i - 1:
-                 message(user_history[i - 1], is_user=True, key=str(i) + '_user')  # this is because the bot starts first.
-         message(bot_history[i], key=str(i))
+     for message in history:
+         if message.get("role") != "system" and message.get("name") is None:
+             with st.chat_message(message["role"]):
+                 st.markdown(message["content"])
else:
    history, agent_response = hr_agent.run(user_input=None)
-     message(agent_response, is_user=False, key=str(0) + '_assistant')
+     # message(agent_response, is_user=False, key=str(0) + '_assistant')
+     with st.chat_message("assistant"):
+         st.markdown(agent_response)
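As a side note, the replay loop and the first-run greeting above rely on Streamlit's rerun model: the script runs top to bottom on every interaction, so earlier turns have to be redrawn from session state each time. A minimal self-contained sketch of that pattern (illustrative only; the greeting text is made up):

import streamlit as st

if "history" not in st.session_state:
    # seed the conversation with an assistant greeting on the first run
    st.session_state["history"] = [{"role": "assistant", "content": "Hi, how can I help you today?"}]

for turn in st.session_state["history"]:
    with st.chat_message(turn["role"]):
        st.markdown(turn["content"])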
    user_history = []
if user_input:
-     message(user_input, is_user=True, key=str(len(user_history)+1)+'_user')
-     history, agent_response = hr_agent.run(user_input=user_input, conversation=history)
-     message(agent_response, is_user=False, key=str(len(bot_history)+1)+'_assistant')
+     with st.chat_message("user"):
+         st.markdown(user_input)
+     stream_out, history, agent_response = hr_agent.run(user_input=user_input, conversation=history, stream=True)
+
+     with st.chat_message("assistant"):
+         if stream_out:
+             message_placeholder = st.empty()
+             full_response = ""
+             for response in agent_response:
+                 if len(response.choices) > 0:
+                     full_response += response.choices[0].delta.get("content", "")
+                     message_placeholder.markdown(full_response + "▌")
+             message_placeholder.markdown(full_response)
+             history.append({"role": "assistant", "content": full_response})
+         else:
+             st.markdown(agent_response)
+

st.session_state['history'] = history
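The streaming branch above follows the common st.empty() placeholder pattern: accumulate the content deltas and keep rewriting a single placeholder so the reply appears to type itself out. A standalone sketch of that pattern is below; fake_token_stream is a made-up stand-in for whatever hr_agent.run yields when stream=True, since the agent code is not shown here.

import time
import streamlit as st

def fake_token_stream():
    # stand-in generator; the real app iterates over the agent's streamed chunks
    for token in ["Sure, ", "let ", "me ", "look ", "that ", "up ", "for ", "you."]:
        time.sleep(0.05)
        yield token

with st.chat_message("assistant"):
    placeholder = st.empty()
    full_response = ""
    for token in fake_token_stream():
        full_response += token
        placeholder.markdown(full_response + "▌")  # trailing block cursor while streaming
    placeholder.markdown(full_response)  # final render without the cursor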