Commit 8ca84f52 authored by Dhananjaya Jayashanka's avatar Dhananjaya Jayashanka

changes done

parent 65f680f9
...@@ -20,7 +20,9 @@ ...@@ -20,7 +20,9 @@
'oppressed': 'Angry', 'oppressed': 'Angry',
'anguished': 'Sad', 'anguished': 'Sad',
'animated': 'Happy', 'animated': 'Happy',
'happen': 'Sad',
'annoyed': 'Angry', 'annoyed': 'Angry',
'simply': 'Angry',
'appalled': 'Angry', 'appalled': 'Angry',
'beatific': 'Happy', 'beatific': 'Happy',
'bereaved': 'Sad', 'bereaved': 'Sad',
......
# from skimage import io #Import useful libraries
import cv2 import cv2
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import numpy as np import numpy as np
import tensorflow as tf import tensorflow as tf
from keras.preprocessing import image from keras.preprocessing import image
#Load the trained model
Saved_model = tf.keras.models.load_model('emotion_lts.h5') Saved_model = tf.keras.models.load_model('emotion_lts.h5')
Saved_model.summary() Saved_model.summary()
#Prediction categories
objects = ('Angry', 'Happy', 'Sad', 'Neutral') objects = ('Angry', 'Happy', 'Sad', 'Neutral')
vid = cv2.VideoCapture(0) vid = cv2.VideoCapture(0)
...@@ -21,16 +23,15 @@ def emotion_analysis(emotions): ...@@ -21,16 +23,15 @@ def emotion_analysis(emotions):
plt.ylabel('percentage') plt.ylabel('percentage')
plt.title('emotion') plt.title('emotion')
#Predict facial expressions
def get_emotions(filePath): def get_emotions(filePath):
cap = cv2.VideoCapture(filePath) cap = cv2.VideoCapture(filePath)
emotions = [] emotions = [] #Define emotion list
while (cap.isOpened()): while (cap.isOpened()):
try: try:
ret, frame = cap.read() ret, frame = cap.read()
# img = image.load_img(frame,grayscale=True, target_size=(48, 48))
frame = cv2.resize(frame, (48, 48)) frame = cv2.resize(frame, (48, 48))
frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
x = image.img_to_array(frame) x = image.img_to_array(frame)
...@@ -39,12 +40,9 @@ def get_emotions(filePath): ...@@ -39,12 +40,9 @@ def get_emotions(filePath):
x /= 255 x /= 255
custom = Saved_model.predict(x) custom = Saved_model.predict(x)
# print(custom[0])
emotion_analysis(custom[0]) emotion_analysis(custom[0])
x = np.array(x, 'float32') x = np.array(x, 'float32')
x = x.reshape([48, 48]); x = x.reshape([48, 48]);
m = 0.000000000000000000001 m = 0.000000000000000000001
a = custom[0] a = custom[0]
for i in range(0, len(a)): for i in range(0, len(a)):
......
...@@ -53,6 +53,7 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route', ...@@ -53,6 +53,7 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route',
}); });
} }
//facial expression diagram
$scope.selectVideoFile = function (e) { $scope.selectVideoFile = function (e) {
$scope.chartLabels = []; $scope.chartLabels = [];
$scope.chartdata = []; $scope.chartdata = [];
...@@ -136,7 +137,7 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route', ...@@ -136,7 +137,7 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route',
console.log(error); console.log(error);
}); });
//emotion graph //emotion graph text analyzing
MainFactory.emotionSentence({ text: $scope.audioText }) MainFactory.emotionSentence({ text: $scope.audioText })
.then(function (response) { .then(function (response) {
console.log(response.data); console.log(response.data);
...@@ -152,7 +153,6 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route', ...@@ -152,7 +153,6 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route',
$scope.chartdata = []; $scope.chartdata = [];
for (let i = 0; i < $scope.emotions.length; i++) { for (let i = 0; i < $scope.emotions.length; i++) {
// $scope.chartLabels.push(i+1);
if ($scope.emotions[i] == 'Angry') { if ($scope.emotions[i] == 'Angry') {
$scope.chartdata.push(1); $scope.chartdata.push(1);
$scope.emotionScore += 0.25; $scope.emotionScore += 0.25;
...@@ -288,6 +288,8 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route', ...@@ -288,6 +288,8 @@ app.controller('MainController', ['$scope', '$rootScope', '$location', '$route',
console.log(error); console.log(error);
}); });
//comparison between the expression prediction results and emotional moment results
MainFactory.textAnalyzeEmotion({ text: $scope.audioText }) MainFactory.textAnalyzeEmotion({ text: $scope.audioText })
.then(function (response) { .then(function (response) {
console.log(response); console.log(response);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment