Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Integrate Emotion Recognition Feature into Jarvis Assistant #266

Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions Jarvis.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from database.mongodb import create_connection, login_user
from database.localStorageServer import server
from src.utils.functions import application
from src.emotion_recognition import run_emotion_detection # Import emotion detection logic
from datetime import datetime, timedelta
import streamlit as st

# Initialize session state if not present
if "user" not in st.session_state:
st.session_state.update({
'password': None,
Expand All @@ -12,6 +14,7 @@
'verified': False,
})

# Function to fetch credentials from local storage
def get_credentials():
conn = server()
return (
Expand All @@ -21,16 +24,26 @@ def get_credentials():
conn.getLocalStorageVal("verified"),
)

# Script entry point: restore any saved session, re-verify it, then launch the app.
if __name__ == "__main__":
    now = datetime.now()

    # First run in this session: pull saved credentials out of local storage.
    if st.session_state['password'] is None:
        (st.session_state['password'],
         st.session_state['user'],
         st.session_state['expiration_date'],
         st.session_state['verified']) = get_credentials()

    # Stored session not yet expired but not verified: try logging in again.
    if st.session_state['expiration_date'] > now.isoformat() and not st.session_state['verified']:
        conn = create_connection()
        if login_user(conn, st.session_state['user'][0], st.session_state['password']):
            st.session_state['verified'] = True
            server().setLocalStorageVal("verified", True)

    if st.session_state['verified']:
        # NOTE(review): run_emotion_detection opens a blocking OpenCV window in the
        # server process — confirm this is intended inside a Streamlit app.
        run_emotion_detection()

    # Hand control to the main application, telling it whether the user is verified.
    application(st.session_state['verified']).run()

1 change: 1 addition & 0 deletions emotion_model.h5
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
model.save('assets/emotion_model.h5') # Save the trained model to 'assets' folder
60 changes: 60 additions & 0 deletions emotion_recognition.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
import numpy as np
import cv2
from keras.models import load_model
from keras.utils import np_utils

# Maps the model's output class index to a human-readable emotion label.
_EMOTION_LABELS = ["Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"]
emotion_dict = dict(enumerate(_EMOTION_LABELS))

def preprocess_image(image):
    """Prepare a face image for the emotion model.

    Converts to grayscale (if needed), resizes to 48x48, scales pixels to
    [0, 1], and returns a float32 array shaped (1, 48, 48, 1) ready for
    model.predict.
    """
    # Only convert when the input still has color channels: cv2.cvtColor with
    # COLOR_BGR2GRAY raises on single-channel input, and detect_emotion may
    # hand us an already-grayscale crop.
    if image.ndim == 3:
        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    image = cv2.resize(image, (48, 48))       # Match the model's expected input size
    image = image.astype('float32') / 255.0   # Normalize pixel values to [0, 1]
    return np.reshape(image, (1, 48, 48, 1))  # Add batch and channel dimensions

def load_emotion_model():
    """Load and return the pre-trained emotion classifier from the assets folder."""
    return load_model('assets/emotion_model.h5')

def detect_emotion(frame, model):
    """Detect faces in a BGR frame and annotate each with its predicted emotion.

    Runs Haar-cascade face detection on a grayscale copy, classifies every
    detected face with `model`, and draws a green bounding box plus the emotion
    label onto `frame` in place. Returns the annotated frame.
    """
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    face_cascade = cv2.CascadeClassifier(cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')

    # Detect faces on the grayscale image (cascade detection expects gray input).
    faces = face_cascade.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30))

    for (x, y, w, h) in faces:
        # Crop from the original BGR frame, not `gray`: preprocess_image does
        # its own BGR->gray conversion, which fails on a single-channel crop.
        face = frame[y:y+h, x:x+w]
        prediction = model.predict(preprocess_image(face))
        emotion = emotion_dict[int(np.argmax(prediction[0]))]  # Highest-probability class
        color = (0, 255, 0)  # Green bounding box and label

        # Draw the rectangle and emotion label on the frame
        cv2.rectangle(frame, (x, y), (x+w, y+h), color, 2)
        cv2.putText(frame, emotion, (x, y-10), cv2.FONT_HERSHEY_SIMPLEX, 1, color, 2)

    return frame

def run_emotion_detection():
    """Stream webcam frames with live emotion annotations until 'q' is pressed.

    Opens the default camera (index 0), runs detect_emotion on every frame, and
    displays the result in an OpenCV window. The loop ends when the camera stops
    delivering frames or the user presses 'q'.
    """
    model = load_emotion_model()
    cap = cv2.VideoCapture(0)  # Default webcam

    try:
        while True:
            ret, frame = cap.read()
            if not ret:  # Camera unavailable or stream ended
                break
            frame = detect_emotion(frame, model)
            cv2.imshow('Emotion Recognition', frame)

            if cv2.waitKey(1) & 0xFF == ord('q'):  # Exit if 'q' is pressed
                break
    finally:
        # Release the camera and close windows even if detection raises,
        # so the device is not left locked for other processes.
        cap.release()
        cv2.destroyAllWindows()

# Allow this module to be run directly as a standalone demo.
if __name__ == '__main__':
    run_emotion_detection()  # Run the emotion detection function when this script is executed
Binary file modified requirements.txt
Binary file not shown.
8 changes: 8 additions & 0 deletions streamlit_app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
import streamlit as st
from src.emotion_recognition import run_emotion_detection

# Minimal Streamlit UI: a title, a short instruction, and a button that
# launches the webcam-based emotion detection loop.
st.title("Emotion Recognition with Jarvis")
st.write("Click the button to start emotion detection.")

# NOTE(review): run_emotion_detection opens a blocking OpenCV window on the
# server process — confirm this is the intended UX for a Streamlit app.
if st.button('Start Emotion Recognition'):
    run_emotion_detection()
Loading