Commit a2d180df authored by I.K Seneviratne's avatar I.K Seneviratne

Merge branch 'monitoring_student_behavior_IT17138000' into 'QA_RELEASE'

Monitoring student behavior it17138000

See merge request !17
parents 6a494dd8 07796831
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from MonitorLecturerApp.models import LectureRecordedVideo, LecturerVideoMetaData
from MonitorLecturerApp.serializers import LectureRecordedVideoSerializer, LecturerVideoMetaDataSerializer
from .MongoModels import *
from rest_framework.views import *
from .ImageOperations import saveImage
@@ -1271,3 +1273,178 @@ class GetLectureGazeSummary(APIView):
            "frame_group_percentages": frame_group_percentages,
            "gaze_labels": gaze_labels
        })
# =====OTHERS=====

class GetLecturerRecordedVideo(APIView):

    def get(self, request):
        lecturer = request.query_params.get('lecturer')
        subject = request.query_params.get('subject')
        date = request.query_params.get('date')

        # retrieve the recorded lecture video for the given lecturer, subject and date
        lec_recorded_video = LectureRecordedVideo.objects.filter(lecturer_id=lecturer, subject__subject_code=subject, lecturer_date=date)
        lec_recorded_video_ser = LectureRecordedVideoSerializer(lec_recorded_video, many=True)
        lec_recorded_video_data = lec_recorded_video_ser.data[0]

        video_name = lec_recorded_video_data['lecture_video_name']

        print('lecturer recorded video name: ', video_name)

        return Response({
            "video_name": video_name
        })
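As a quick reference, the sketch below shows one way this endpoint could be exercised once the `get-lecture-recorded-video-name/` route (registered later in this merge request) is in place. The host and port mirror the fetch call in the template further down; the lecturer, subject, and date values are placeholders for illustration, not real project data.

# minimal sketch of a client call (placeholder query values; assumes the dev server at 127.0.0.1:8000)
import requests

params = {
    'lecturer': '1',          # placeholder lecturer id
    'subject': 'IT3040',      # placeholder subject code
    'date': '2020-09-20',     # placeholder lecture date
}
resp = requests.get('http://127.0.0.1:8000/get-lecture-recorded-video-name/', params=params)
print(resp.json()['video_name'])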
# this API will get lecture activity correlations
class GetLectureActivityCorrelations(APIView):

    def get(self, request):
        option = request.query_params.get('option')
        lecturer = request.query_params.get('lecturer')
        int_option = int(option)

        # the date range runs from 'option' days ago up to today
        current_date = datetime.datetime.now().date()
        option_date = datetime.timedelta(days=int_option)
        previous_date = current_date - option_date

        individual_lec_activities = []
        activity_correlations = []

        # retrieving lecture activities
        lec_activity = LectureActivity.objects.filter(
            lecture_video_id__date__gte=previous_date,
            lecture_video_id__date__lte=current_date,
            lecture_video_id__lecturer=lecturer
        )

        # if there are lecture activities
        if len(lec_activity) > 0:
            isRecordFound = True
            activity_serializer = LectureActivitySerializer(lec_activity, many=True)
            activity_data = activity_serializer.data
            _, individual_lec_activities, _ = ar.get_student_activity_summary_for_period(activity_data)

        # retrieving lecturer recorded activities
        lec_recorded_activity = LecturerVideoMetaData.objects.filter(
            lecturer_video_id__lecturer_date__gte=previous_date,
            lecturer_video_id__lecturer_date__lte=current_date,
            lecturer_video_id__lecturer=lecturer
        )

        # if there are any recorded lectures
        if len(lec_recorded_activity) > 0:
            lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
            lec_recorded_activity_data = lec_recorded_activity_ser.data

            activity_correlations = ar.get_activity_correlations(individual_lec_activities, lec_recorded_activity_data)

        print('activity correlations: ', activity_correlations)

        return Response({
            "correlations": activity_correlations
        })
# this API will get lecture emotion correlations
class GetLectureEmotionCorrelations(APIView):

    def get(self, request):
        option = request.query_params.get('option')
        lecturer = request.query_params.get('lecturer')
        int_option = int(option)

        current_date = datetime.datetime.now().date()
        option_date = datetime.timedelta(days=int_option)
        previous_date = current_date - option_date

        individual_lec_emotions = []
        emotion_correlations = []

        # retrieving lecture emotions
        lec_emotion = LectureEmotionReport.objects.filter(
            lecture_video_id__date__gte=previous_date,
            lecture_video_id__date__lte=current_date,
            lecture_video_id__lecturer=lecturer
        )

        # if there are lecture emotions
        if len(lec_emotion) > 0:
            emotion_serializer = LectureEmotionSerializer(lec_emotion, many=True)
            emotion_data = emotion_serializer.data
            _, individual_lec_emotions, _ = ed.get_student_emotion_summary_for_period(emotion_data)

        # retrieving lecturer recorded activities
        lec_recorded_activity = LecturerVideoMetaData.objects.filter(
            lecturer_video_id__lecturer_date__gte=previous_date,
            lecturer_video_id__lecturer_date__lte=current_date,
            lecturer_video_id__lecturer=lecturer
        )

        # if there are any recorded lectures
        if len(lec_recorded_activity) > 0:
            lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
            lec_recorded_activity_data = lec_recorded_activity_ser.data

            emotion_correlations = ed.get_emotion_correlations(individual_lec_emotions, lec_recorded_activity_data)

        return Response({
            "correlations": emotion_correlations
        })
# this API will get lecture gaze correlations
class GetLectureGazeCorrelations(APIView):

    def get(self, request):
        option = request.query_params.get('option')
        lecturer = request.query_params.get('lecturer')
        int_option = int(option)

        current_date = datetime.datetime.now().date()
        option_date = datetime.timedelta(days=int_option)
        previous_date = current_date - option_date

        individual_lec_gaze = []
        gaze_correlations = []

        # retrieving lecture gaze estimations
        lec_gaze = LectureGazeEstimation.objects.filter(
            lecture_video_id__date__gte=previous_date,
            lecture_video_id__date__lte=current_date,
            lecture_video_id__lecturer=lecturer
        )

        # if there are gaze estimations
        if len(lec_gaze) > 0:
            gaze_serializer = LectureGazeEstimationSerializer(lec_gaze, many=True)
            gaze_data = gaze_serializer.data
            _, individual_lec_gaze, _ = hge.get_student_gaze_estimation_summary_for_period(gaze_data)

        # retrieving lecturer recorded activities
        lec_recorded_activity = LecturerVideoMetaData.objects.filter(
            lecturer_video_id__lecturer_date__gte=previous_date,
            lecturer_video_id__lecturer_date__lte=current_date,
            lecturer_video_id__lecturer=lecturer
        )

        # if there are any recorded lectures
        if len(lec_recorded_activity) > 0:
            lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
            lec_recorded_activity_data = lec_recorded_activity_ser.data

            # find the correlations between lecture gaze estimations and the recorded lecture
            gaze_correlations = hge.get_gaze_correlations(individual_lec_gaze, lec_recorded_activity_data)

        return Response({
            "correlations": gaze_correlations
        })
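All three correlation endpoints above interpret the `option` query parameter as a number of days to look back from today. A standalone sketch of that window calculation (the sample value is an assumption, not from this merge request):

# standalone sketch of the date-window calculation shared by the correlation endpoints
import datetime

option = '30'                                             # e.g. ?option=30 for the last 30 days (assumed value)
int_option = int(option)
current_date = datetime.datetime.now().date()
previous_date = current_date - datetime.timedelta(days=int_option)
print(previous_date, 'to', current_date)                  # records are filtered to this inclusive range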
@@ -11,12 +11,12 @@ from . models import VideoMeta
from . logic import custom_sorter as cs
from .logic import id_generator as ig
from .logic import activity_recognition as ar
from .logic import utilities as ut

from .serializers import LectureEmotionSerializer

import pandas as pd


# emotion recognition method
def emotion_recognition(classifier, face_classifier, image):
    label = ""
    class_labels = ['Angry', 'Happy', 'Neutral', 'Sad', 'Surprise']
@@ -686,3 +686,79 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
    # save
    new_lec_emotion_frame_groupings.save()
# this method will get emotion correlations
def get_emotion_correlations(individual_lec_emotions, lec_recorded_activity_data):
    # this variable will be used to store the correlations
    correlations = []

    limit = 10
    data_index = ['lecture-{}'.format(i + 1) for i in range(len(individual_lec_emotions))]

    # student emotion labels
    student_emotion_labels = ['Happy', 'Sad', 'Angry', 'Surprise', 'Neutral']
    # lecturer activity labels
    lecturer_activity_labels = ['seated', 'standing', 'walking']

    # lecturer recorded data list (lecturer)
    sitting_perct_list = []
    standing_perct_list = []
    walking_perct_list = []

    # lecture emotion data list (student)
    happy_perct_list = []
    sad_perct_list = []
    angry_perct_list = []
    surprise_perct_list = []
    neutral_perct_list = []

    # loop through the lecturer recorded data (lecturer)
    for data in lec_recorded_activity_data:
        sitting_perct_list.append(int(data['seated_count']))
        standing_perct_list.append(int(data['standing_count']))
        walking_perct_list.append(int(data['walking_count']))

    # loop through the lecture emotion data (student)
    for data in individual_lec_emotions:
        happy_perct_list.append(int(data['happy_perct']))
        sad_perct_list.append(int(data['sad_perct']))
        angry_perct_list.append(int(data['angry_perct']))
        surprise_perct_list.append(int(data['surprise_perct']))
        neutral_perct_list.append(int(data['neutral_perct']))

    corr_data = {'Happy': happy_perct_list, 'Sad': sad_perct_list, 'Angry': angry_perct_list,
                 'Surprise': surprise_perct_list, 'Neutral': neutral_perct_list,
                 'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}

    # create the dataframe
    df = pd.DataFrame(corr_data, index=data_index)

    # calculate the correlation
    pd_series = ut.get_top_abs_correlations(df, limit)

    print('====correlated variables=====')
    print(pd_series)

    for i in range(limit):
        # this dictionary will hold the pandas.Series object's index and value separately
        corr_dict = {}

        index = pd_series.index[i]

        # check whether the first index is a student emotion
        isStudentEmotion = index[0] in student_emotion_labels
        # check whether the second index is a lecturer activity
        isLecturerAct = index[1] in lecturer_activity_labels

        # if the pair consists of a student emotion and a lecturer activity, add it to the dictionary
        if isStudentEmotion & isLecturerAct:
            corr_dict['index'] = index
            corr_dict['value'] = pd_series.values[i]

            # append the dictionary to the 'correlations' list
            correlations.append(corr_dict)

    # return the list
    return correlations
@@ -9,6 +9,9 @@ from .custom_sorter import *
from ..MongoModels import *
from ..serializers import *
from . import id_generator as ig
from . import utilities as ut

import pandas as pd


def activity_recognition(video_path):
@@ -849,3 +852,77 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
    # save
    new_lec_activity_frame_groupings.save()
# this method will get activity correlations
def get_activity_correlations(individual_lec_activities, lec_recorded_activity_data):
    # this variable will be used to store the correlations
    correlations = []

    limit = 10
    data_index = ['lecture-{}'.format(i + 1) for i in range(len(individual_lec_activities))]

    # student activity labels
    student_activity_labels = ['phone checking', 'listening', 'note taking']
    # lecturer activity labels
    lecturer_activity_labels = ['seated', 'standing', 'walking']

    # lecturer recorded data list (lecturer)
    sitting_perct_list = []
    standing_perct_list = []
    walking_perct_list = []

    # lecture activity data list (student)
    phone_perct_list = []
    listen_perct_list = []
    note_perct_list = []

    # loop through the lecturer recorded data (lecturer)
    for data in lec_recorded_activity_data:
        sitting_perct_list.append(int(data['seated_count']))
        standing_perct_list.append(int(data['standing_count']))
        walking_perct_list.append(int(data['walking_count']))

    # loop through the lecture activity data (student)
    for data in individual_lec_activities:
        phone_perct_list.append(int(data['phone_perct']))
        listen_perct_list.append(int(data['listening_perct']))
        note_perct_list.append(int(data['writing_perct']))

    corr_data = {'phone checking': phone_perct_list, 'listening': listen_perct_list, 'note taking': note_perct_list,
                 'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}

    # create the dataframe
    df = pd.DataFrame(corr_data, index=data_index)

    # calculate the correlation
    pd_series = ut.get_top_abs_correlations(df, limit)

    print('====correlated variables=====')
    print(pd_series)

    for i in range(limit):
        # this dictionary will hold the pandas.Series object's index and value separately
        corr_dict = {}

        index = pd_series.index[i]

        # check whether the first index is a student activity
        isStudentAct = index[0] in student_activity_labels
        # check whether the second index is a lecturer activity
        isLecturerAct = index[1] in lecturer_activity_labels

        # if the pair consists of a student activity and a lecturer activity, add it to the dictionary
        if isStudentAct & isLecturerAct:
            corr_dict['index'] = index
            corr_dict['value'] = pd_series.values[i]

            # append the dictionary to the 'correlations' list
            correlations.append(corr_dict)

    # return the list
    return correlations
@@ -15,10 +15,12 @@ from . face_landmarks import get_landmark_model, detect_marks
import os
import shutil
import math
import pandas as pd

from ..MongoModels import *
from ..serializers import *
from . import id_generator as ig
from . import utilities as ut


def get_2d_points(img, rotation_vector, translation_vector, camera_matrix, val):
@@ -964,3 +966,77 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
    # save
    new_lec_gaze_frame_groupings.save()
# this method will get gaze estimation correlations
def get_gaze_correlations(individual_lec_gaze, lec_recorded_activity_data):
    # this variable will be used to store the correlations
    correlations = []

    limit = 10
    data_index = ['lecture-{}'.format(i + 1) for i in range(len(individual_lec_gaze))]

    # student gaze labels
    student_gaze_labels = ['Up and Right', 'Up and Left', 'Down and Right', 'Down and Left', 'Front']
    # lecturer activity labels
    lecturer_activity_labels = ['seated', 'standing', 'walking']

    # lecturer recorded data list (lecturer)
    sitting_perct_list = []
    standing_perct_list = []
    walking_perct_list = []

    # lecture gaze estimation data list (student)
    upright_perct_list = []
    upleft_perct_list = []
    downright_perct_list = []
    downleft_perct_list = []
    front_perct_list = []

    # loop through the lecturer recorded data (lecturer)
    for data in lec_recorded_activity_data:
        sitting_perct_list.append(int(data['seated_count']))
        standing_perct_list.append(int(data['standing_count']))
        walking_perct_list.append(int(data['walking_count']))

    # loop through the lecture gaze estimation data (student)
    for data in individual_lec_gaze:
        upright_perct_list.append(int(data['looking_up_and_right_perct']))
        upleft_perct_list.append(int(data['looking_up_and_left_perct']))
        downright_perct_list.append(int(data['looking_down_and_right_perct']))
        downleft_perct_list.append(int(data['looking_down_and_left_perct']))
        front_perct_list.append(int(data['looking_front_perct']))

    corr_data = {'Up and Right': upright_perct_list, 'Up and Left': upleft_perct_list,
                 'Down and Right': downright_perct_list, 'Down and Left': downleft_perct_list, 'Front': front_perct_list,
                 'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}

    # create the dataframe
    df = pd.DataFrame(corr_data, index=data_index)

    # calculate the correlation
    pd_series = ut.get_top_abs_correlations(df, limit)

    print('====correlated variables=====')
    print(pd_series)

    for i in range(limit):
        # this dictionary will hold the pandas.Series object's index and value separately
        corr_dict = {}

        index = pd_series.index[i]

        # check whether the first index is a student gaze label
        isStudentGaze = index[0] in student_gaze_labels
        # check whether the second index is a lecturer activity
        isLecturerAct = index[1] in lecturer_activity_labels

        # if the pair consists of a student gaze label and a lecturer activity, add it to the dictionary
        if isStudentGaze & isLecturerAct:
            corr_dict['index'] = index
            corr_dict['value'] = pd_series.values[i]

            # append the dictionary to the 'correlations' list
            correlations.append(corr_dict)

    # return the list
    return correlations
def get_redundant_pairs(df):
    '''Get diagonal and lower triangular pairs of correlation matrix'''
    pairs_to_drop = set()
    cols = df.columns

    for i in range(0, df.shape[1]):
        for j in range(0, i + 1):
            pairs_to_drop.add((cols[i], cols[j]))

    return pairs_to_drop


def get_top_abs_correlations(df, n):
    au_corr = df.corr().abs().unstack()
    labels_to_drop = get_redundant_pairs(df)
    au_corr = au_corr.drop(labels=labels_to_drop).sort_values(ascending=False)
    return au_corr[0:n]
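To make the behaviour of these helpers concrete, here is a small self-contained sketch with made-up values (the numbers, lecture names, and the reduced column set are illustrative only, not data from this project). It builds a frame in the same shape as the corr_data dictionaries above, takes the strongest absolute pairwise correlations, and then keeps only student-versus-lecturer pairs, mirroring what get_activity_correlations does:

# toy demonstration of get_redundant_pairs / get_top_abs_correlations (all values are made up)
import pandas as pd

corr_data = {
    'phone checking': [30, 10, 25, 5],      # student activity percentages per lecture (illustrative)
    'listening':      [60, 85, 70, 90],
    'seated':         [12, 40, 20, 45],     # lecturer posture counts per recorded lecture (illustrative)
    'walking':        [50, 10, 35, 5],
}
df = pd.DataFrame(corr_data, index=['lecture-1', 'lecture-2', 'lecture-3', 'lecture-4'])

# strongest absolute pairwise correlations, with self-pairs and mirrored duplicates removed
top_pairs = get_top_abs_correlations(df, 3)
print(top_pairs)

# keep only (student label, lecturer label) pairs, as the correlation methods above do
student_labels = ['phone checking', 'listening', 'note taking']
lecturer_labels = ['seated', 'standing', 'walking']
correlations = [{'index': idx, 'value': value}
                for idx, value in top_pairs.items()
                if idx[0] in student_labels and idx[1] in lecturer_labels]
print(correlations)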
@@ -243,21 +243,19 @@
            //define the student video src
            let video_src = "{% static '' %}FirstApp/videos/" + global_video_name;

            {#global_lecturer_video_name = "Test_1.mp4";#}
            {#global_lecturer_video_name = "Test_2.mp4";#}
            global_lecturer_video_name = "Test_3.mp4";

            //define the lecturer video src
            let lecturer_video_src = "{% static '' %}FirstApp/lecturer_videos/" + global_lecturer_video_name;

            //assign the video src
            $('#student_video').attr('src', video_src);

            //assign the video src
            $('#lecturer_video').attr('src', lecturer_video_src);

            //fetch the lecture recorded video name
            fetch('http://127.0.0.1:8000/get-lecture-recorded-video-name/?lecturer=' + global_lecturer + '&subject=' + global_subject + '&date=' + global_lecture_date)
                .then((res) => res.json())
                .then((out) => assignLecturerRecordedVideoName(out))
                .catch((err) => alert('error: ' + err));

            {#global_lecturer_video_name = "Test_1.mp4";#}
            {#global_lecturer_video_name = "Test_2.mp4";#}
            {#global_lecturer_video_name = "Test_3.mp4";#}

            $('#integrate_modal').modal();
            //fetch data from the API
@@ -268,6 +266,21 @@
        });
        //assign the lecturer recorded video name
        function assignLecturerRecordedVideoName(res) {
            global_lecturer_video_name = res.video_name;

            //define the lecturer video src
            let lecturer_video_src = "{% static '' %}FirstApp/lecturer_videos/" + global_lecturer_video_name;

            //assign the video src
            $('#lecturer_video').attr('src', lecturer_video_src);

            $('#integrate_modal').modal();
        }

        //this function will load the activity recognition for frames
        function displayActivityRecognitionForFrame(response) {
            //hide the loader
@@ -527,11 +540,11 @@
{% load static %}

<!-- Page Heading -->
{# <div class="d-sm-flex align-items-center justify-content-between mb-4">#}
{# <h1 class="h3 mb-0 text-gray-800">Student Activity Recognition</h1>#}
<div class="d-sm-flex align-items-center justify-content-between mb-4">
    <h1 class="h3 mb-0 text-gray-800">Student Activity Recognition</h1>
{# <button type="button" data-target="#generateReportModal" data-toggle="modal" class="d-none d-sm-inline-block btn btn-sm btn-primary shadow-sm" id="generate_report_before" disabled><i#}
{# class="fas fa-download fa-sm text-white-50"></i> Generate Report</button>#}
{# </div>#}
</div>

<!--first row -->
...
@@ -203,6 +203,21 @@ urlpatterns = [
    # retrieves lecture gaze summary
    url(r'^get-lecture-gaze-summary/$', api.GetLectureGazeSummary.as_view()),

    # retrieves lecture activity correlations
    url(r'^get-activity-correlations/$', api.GetLectureActivityCorrelations.as_view()),

    # retrieves lecture emotion correlations
    url(r'^get-emotion-correlations/$', api.GetLectureEmotionCorrelations.as_view()),

    # retrieves lecture gaze correlations
    url(r'^get-gaze-correlations/$', api.GetLectureGazeCorrelations.as_view()),

    ##### OTHERS #####

    # retrieves the lecturer recorded video name
    url(r'^get-lecture-recorded-video-name/$', api.GetLecturerRecordedVideo.as_view()),

    # routers
    # path('', include(router.urls)),
...