Commit 2fcd4b02 authored by I.K Seneviratne

Committing the correlation of student behavior and lecturer evaluation components in the lecturer home page.
parent 32a6e9d0
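For context, every correlation feature introduced in this commit follows the same pattern: build a per-lecture pandas DataFrame whose columns are the student behaviour percentages and the lecturer activity counts, rank the absolute pairwise correlations, and keep only the student-vs-lecturer pairs. A minimal standalone sketch of that idea, with made-up sample numbers rather than data from the system:

import pandas as pd

# toy per-lecture values (illustrative only, not real measurements)
corr_data = {
    'phone checking': [20, 35, 10, 5],
    'listening': [60, 40, 75, 85],
    'seated': [30, 55, 20, 10],
    'walking': [50, 25, 60, 70],
}
df = pd.DataFrame(corr_data, index=['lecture-1', 'lecture-2', 'lecture-3', 'lecture-4'])

# absolute pairwise correlations, strongest first, each unordered pair kept once
pairs = df.corr().abs().unstack().sort_values(ascending=False)
pairs = pairs[pairs.index.get_level_values(0) < pairs.index.get_level_values(1)]

# keep only the cross-correlations between student behaviour and lecturer activity
student_labels = {'phone checking', 'listening'}
lecturer_labels = {'seated', 'walking'}
for (first, second), value in pairs.items():
    if (first in student_labels and second in lecturer_labels) or \
       (first in lecturer_labels and second in student_labels):
        print(first, second, round(value, 2))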
from rest_framework.permissions import IsAuthenticated, IsAdminUser
from rest_framework.authentication import SessionAuthentication, BasicAuthentication
from MonitorLecturerApp.models import LectureRecordedVideo, LecturerVideoMetaData
from MonitorLecturerApp.serializers import LectureRecordedVideoSerializer, LecturerVideoMetaDataSerializer
from .MongoModels import *
from rest_framework.views import *
from .ImageOperations import saveImage
......@@ -1270,4 +1272,179 @@ class GetLectureGazeSummary(APIView):
"frame_landmarks": frame_landmarks,
"frame_group_percentages": frame_group_percentages,
"gaze_labels": gaze_labels
})
\ No newline at end of file
})
# =====OTHERS=====
class GetLecturerRecordedVideo(APIView):
def get(self, request):
lecturer = request.query_params.get('lecturer')
subject = request.query_params.get('subject')
date = request.query_params.get('date')
# retrieve data
lec_recorded_video = LectureRecordedVideo.objects.filter(lecturer_id=lecturer, subject__subject_code=subject, lecturer_date=date)
lec_recorded_video_ser = LectureRecordedVideoSerializer(lec_recorded_video, many=True)
lec_recorded_video_data = lec_recorded_video_ser.data[0]
video_name = lec_recorded_video_data['lecture_video_name']
print('lecturer recorded video name: ', video_name)
return Response({
"video_name": video_name
})
# this API will get lecture activity correlations
class GetLectureActivityCorrelations(APIView):
def get(self, request):
option = request.query_params.get('option')
lecturer = request.query_params.get('lecturer')
int_option = int(option)
current_date = datetime.datetime.now().date()
option_date = datetime.timedelta(days=int_option)
previous_date = current_date - option_date
individual_lec_activities = []
activity_correlations = []
# retrieving lecture activities
lec_activity = LectureActivity.objects.filter(
lecture_video_id__date__gte=previous_date,
lecture_video_id__date__lte=current_date,
lecture_video_id__lecturer=lecturer
)
if len(lec_activity) > 0:
isRecordFound = True
activity_serializer = LectureActivitySerializer(lec_activity, many=True)
activity_data = activity_serializer.data
_, individual_lec_activities, _ = ar.get_student_activity_summary_for_period(activity_data)
# retrieving lecturer recorded activities
lec_recorded_activity = LecturerVideoMetaData.objects.filter(
lecturer_video_id__lecturer_date__gte=previous_date,
lecturer_video_id__lecturer_date__lte=current_date,
lecturer_video_id__lecturer=lecturer
)
if len(lec_recorded_activity) > 0:
lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
lec_recorded_activity_data = lec_recorded_activity_ser.data
activity_correlations = ar.get_activity_correlations(individual_lec_activities, lec_recorded_activity_data)
print('activity correlations: ', activity_correlations)
return Response({
"correlations": activity_correlations
})
# this API will get lecture emotion correlations
class GetLectureEmotionCorrelations(APIView):
def get(self, request):
option = request.query_params.get('option')
lecturer = request.query_params.get('lecturer')
int_option = int(option)
current_date = datetime.datetime.now().date()
option_date = datetime.timedelta(days=int_option)
previous_date = current_date - option_date
individual_lec_emotions = []
emotion_correlations = []
# retrieving lecture emotions
lec_emotion = LectureEmotionReport.objects.filter(
lecture_video_id__date__gte=previous_date,
lecture_video_id__date__lte=current_date,
lecture_video_id__lecturer=lecturer
)
# if there are lecture emotions
if len(lec_emotion) > 0:
emotion_serializer = LectureEmotionSerializer(lec_emotion, many=True)
emotion_data = emotion_serializer.data
_, individual_lec_emotions, _ = ed.get_student_emotion_summary_for_period(emotion_data)
# retrieving lecturer recorded activities
lec_recorded_activity = LecturerVideoMetaData.objects.filter(
lecturer_video_id__lecturer_date__gte=previous_date,
lecturer_video_id__lecturer_date__lte=current_date,
lecturer_video_id__lecturer=lecturer
)
# if there are any recorded lectures
if len(lec_recorded_activity) > 0:
lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
lec_recorded_activity_data = lec_recorded_activity_ser.data
emotion_correlations = ed.get_emotion_correlations(individual_lec_emotions, lec_recorded_activity_data)
return Response({
"correlations": emotion_correlations
})
# this API will get lecture gaze correlations
class GetLectureGazeCorrelations(APIView):
def get(self, request):
option = request.query_params.get('option')
lecturer = request.query_params.get('lecturer')
int_option = int(option)
current_date = datetime.datetime.now().date()
option_date = datetime.timedelta(days=int_option)
previous_date = current_date - option_date
individual_lec_gaze = []
gaze_correlations = []
# retrieving lecture gaze estimations
lec_gaze = LectureGazeEstimation.objects.filter(
lecture_video_id__date__gte=previous_date,
lecture_video_id__date__lte=current_date,
lecture_video_id__lecturer=lecturer
)
# if there are gaze estimations
if len(lec_gaze) > 0:
gaze_serializer = LectureGazeEstimationSerializer(lec_gaze, many=True)
gaze_data = gaze_serializer.data
_, individual_lec_gaze, _ = hge.get_student_gaze_estimation_summary_for_period(gaze_data)
# retrieving lecturer recorded activities
lec_recorded_activity = LecturerVideoMetaData.objects.filter(
lecturer_video_id__lecturer_date__gte=previous_date,
lecturer_video_id__lecturer_date__lte=current_date,
lecturer_video_id__lecturer=lecturer
)
# if there are any recorded lectures
if len(lec_recorded_activity) > 0:
lec_recorded_activity_ser = LecturerVideoMetaDataSerializer(lec_recorded_activity, many=True)
lec_recorded_activity_data = lec_recorded_activity_ser.data
# find the correlations between lecture gaze estimations and recorded lecture
gaze_correlations = hge.get_gaze_correlations(individual_lec_gaze, lec_recorded_activity_data)
return Response({
"correlations": gaze_correlations
})
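All three correlation endpoints above take the same query parameters: lecturer (the lecturer id) and option (the number of days to look back). A quick way to exercise one of them against the local development server used elsewhere in this commit; the lecturer id and option value below are placeholders, not values taken from the commit:

import requests

# placeholder ids; adjust to your environment
response = requests.get(
    'http://127.0.0.1:8000/get-activity-correlations/',
    params={'lecturer': 1, 'option': 30},
)
print(response.json())
# expected shape (values illustrative):
# {"correlations": [{"index": ["phone checking", "walking"], "value": 0.82}, ...]}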
......@@ -11,12 +11,12 @@ from . models import VideoMeta
from . logic import custom_sorter as cs
from .logic import id_generator as ig
from .logic import activity_recognition as ar
# emotion recognition method
from .logic import utilities as ut
from .serializers import LectureEmotionSerializer
import pandas as pd
# emotion recognition method
def emotion_recognition(classifier, face_classifier, image):
label = ""
class_labels = ['Angry', 'Happy', 'Neutral', 'Sad', 'Surprise']
......@@ -686,3 +686,79 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
# save
new_lec_emotion_frame_groupings.save()
# this method will get emotion correlations
def get_emotion_correlations(individual_lec_emotions, lec_recorded_activity_data):
# this variable will be used to store the correlations
correlations = []
limit = 10
data_index = ['lecture-{}'.format(i + 1) for i in range(len(individual_lec_emotions))]
# student emotion labels
student_emotion_labels = ['Happy', 'Sad', 'Angry', 'Surprise', 'Neutral']
lecturer_activity_labels = ['seated', 'standing', 'walking']
# lecturer recorded data list (lecturer)
sitting_perct_list = []
standing_perct_list = []
walking_perct_list = []
# lecture emotion data list (student)
happy_perct_list = []
sad_perct_list = []
angry_perct_list = []
surprise_perct_list = []
neutral_perct_list = []
# loop through the lecturer recorded data (lecturer)
for data in lec_recorded_activity_data:
sitting_perct_list.append(int(data['seated_count']))
standing_perct_list.append(int(data['standing_count']))
walking_perct_list.append(int(data['walking_count']))
# loop through the lecture emotion data (student)
for data in individual_lec_emotions:
happy_perct_list.append(int(data['happy_perct']))
sad_perct_list.append(int(data['sad_perct']))
angry_perct_list.append(int(data['angry_perct']))
surprise_perct_list.append(int(data['surprise_perct']))
neutral_perct_list.append(int(data['neutral_perct']))
corr_data = {'Happy': happy_perct_list, 'Sad': sad_perct_list, 'Angry': angry_perct_list, 'Surprise': surprise_perct_list, 'Neutral': neutral_perct_list,
'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}
# create the dataframe
df = pd.DataFrame(corr_data, index=data_index)
# calculate the correlation
pd_series = ut.get_top_abs_correlations(df, limit)
print('====correlated variables=====')
print(pd_series)
for i in range(limit):
# this dictionary will get the pandas.Series object's indices and values separately
corr_dict = {}
index = pd_series.index[i]
# check whether the first index is a student emotion
isStudentEmotion = index[0] in student_emotion_labels
# check whether the second index is a lecturer activity
isLecturerAct = index[1] in lecturer_activity_labels
# if the pair is a student emotion and a lecturer activity, add it to the dictionary
if isStudentEmotion and isLecturerAct:
corr_dict['index'] = index
corr_dict['value'] = pd_series.values[i]
# append the dictionary to the 'correlations' list
correlations.append(corr_dict)
# return the list
return correlations
......@@ -9,6 +9,9 @@ from .custom_sorter import *
from ..MongoModels import *
from ..serializers import *
from . import id_generator as ig
from . import utilities as ut
import pandas as pd
def activity_recognition(video_path):
......@@ -849,3 +852,77 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
# save
new_lec_activity_frame_groupings.save()
# this method will get activity correlations
def get_activity_correlations(individual_lec_activities, lec_recorded_activity_data):
# this variable will be used to store the correlations
correlations = []
limit = 10
data_index = ['lecture-{}'.format(i+1) for i in range(len(individual_lec_activities))]
# student activity labels
student_activity_labels = ['phone checking', 'listening', 'note taking']
lecturer_activity_labels = ['seated', 'standing', 'walking']
# lecturer recorded data list (lecturer)
sitting_perct_list = []
standing_perct_list = []
walking_perct_list = []
# lecture activity data list (student)
phone_perct_list = []
listen_perct_list = []
note_perct_list = []
# loop through the lecturer recorded data (lecturer)
for data in lec_recorded_activity_data:
sitting_perct_list.append(int(data['seated_count']))
standing_perct_list.append(int(data['standing_count']))
walking_perct_list.append(int(data['walking_count']))
# loop through the lecture activity data (student)
for data in individual_lec_activities:
phone_perct_list.append(int(data['phone_perct']))
listen_perct_list.append(int(data['listening_perct']))
note_perct_list.append(int(data['writing_perct']))
corr_data = {'phone checking': phone_perct_list, 'listening': listen_perct_list, 'note taking': note_perct_list,
'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}
# create the dataframe
df = pd.DataFrame(corr_data, index=data_index)
# calculate the correlation
pd_series = ut.get_top_abs_correlations(df, limit)
print('====correlated variables=====')
print(pd_series)
for i in range(limit):
# this dictionary will get the pandas.Series object's indices and values separately
corr_dict = {}
index = pd_series.index[i]
# check whether the first index is a student activity
isStudentAct = index[0] in student_activity_labels
# check whether the second index is a lecturer activity
isLecturerAct = index[1] in lecturer_activity_labels
# if the pair is a student activity and a lecturer activity, add it to the dictionary
if isStudentAct and isLecturerAct:
corr_dict['index'] = index
corr_dict['value'] = pd_series.values[i]
# append the dictionary to the 'correlations' list
correlations.append(corr_dict)
# return the list
return correlations
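A rough usage sketch for get_activity_correlations, assuming both argument lists describe the same lectures in the same order (the field names come from the loops above; the numbers are made up):

# illustrative input for three lectures (values are made up)
individual_lec_activities = [
    {'phone_perct': 20, 'listening_perct': 60, 'writing_perct': 20},
    {'phone_perct': 35, 'listening_perct': 40, 'writing_perct': 25},
    {'phone_perct': 10, 'listening_perct': 75, 'writing_perct': 15},
]
lec_recorded_activity_data = [
    {'seated_count': 30, 'standing_count': 20, 'walking_count': 50},
    {'seated_count': 55, 'standing_count': 20, 'walking_count': 25},
    {'seated_count': 20, 'standing_count': 15, 'walking_count': 65},
]
correlations = get_activity_correlations(individual_lec_activities, lec_recorded_activity_data)
# each entry looks like {'index': ('phone checking', 'walking'), 'value': 0.97}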
......@@ -15,10 +15,12 @@ from . face_landmarks import get_landmark_model, detect_marks
import os
import shutil
import math
import pandas as pd
from ..MongoModels import *
from ..serializers import *
from . import id_generator as ig
from . import utilities as ut
def get_2d_points(img, rotation_vector, translation_vector, camera_matrix, val):
......@@ -964,3 +966,77 @@ def save_frame_groupings(video_name, frame_landmarks, frame_group_dict):
# save
new_lec_gaze_frame_groupings.save()
# this method will get gaze estimation correlations
def get_gaze_correlations(individual_lec_gaze, lec_recorded_activity_data):
# this variable will be used to store the correlations
correlations = []
limit = 10
data_index = ['lecture-{}'.format(i + 1) for i in range(len(individual_lec_gaze))]
# student gaze labels
student_gaze_labels = ['Up and Right', 'Up and Left', 'Down and Right', 'Down and Left', 'Front']
lecturer_activity_labels = ['seated', 'standing', 'walking']
# lecturer recorded data list (lecturer)
sitting_perct_list = []
standing_perct_list = []
walking_perct_list = []
# lecture gaze data list (student)
upright_perct_list = []
upleft_perct_list = []
downright_perct_list = []
downleft_perct_list = []
front_perct_list = []
# loop through the lecturer recorded data (lecturer)
for data in lec_recorded_activity_data:
sitting_perct_list.append(int(data['seated_count']))
standing_perct_list.append(int(data['standing_count']))
walking_perct_list.append(int(data['walking_count']))
# loop through the lecture gaze data (student)
for data in individual_lec_gaze:
upright_perct_list.append(int(data['looking_up_and_right_perct']))
upleft_perct_list.append(int(data['looking_up_and_left_perct']))
downright_perct_list.append(int(data['looking_down_and_right_perct']))
downleft_perct_list.append(int(data['looking_down_and_left_perct']))
front_perct_list.append(int(data['looking_front_perct']))
corr_data = {'Up and Right': upright_perct_list, 'Up and Left': upleft_perct_list, 'Down and Right': downright_perct_list,
'Down and Left': downleft_perct_list, 'Front': front_perct_list,
'seated': sitting_perct_list, 'standing': standing_perct_list, 'walking': walking_perct_list}
# create the dataframe
df = pd.DataFrame(corr_data, index=data_index)
# calculate the correlation
pd_series = ut.get_top_abs_correlations(df, limit)
print('====correlated variables=====')
print(pd_series)
for i in range(limit):
# this dictionary will get the pandas.Series object's indices and values separately
corr_dict = {}
index = pd_series.index[i]
# check whether the first index is a student gaze label
isStudentGaze = index[0] in student_gaze_labels
# check whether the second index is a lecturer activity
isLecturerAct = index[1] in lecturer_activity_labels
# if the pair is a student gaze label and a lecturer activity, add it to the dictionary
if isStudentGaze and isLecturerAct:
corr_dict['index'] = index
corr_dict['value'] = pd_series.values[i]
# append the dictionary to the 'correlations' list
correlations.append(corr_dict)
# return the list
return correlations
def get_redundant_pairs(df):
'''Get diagonal and lower triangular pairs of correlation matrix'''
pairs_to_drop = set()
cols = df.columns
for i in range(0, df.shape[1]):
for j in range(0, i+1):
pairs_to_drop.add((cols[i], cols[j]))
return pairs_to_drop
def get_top_abs_correlations(df, n):
au_corr = df.corr().abs().unstack()
labels_to_drop = get_redundant_pairs(df)
au_corr = au_corr.drop(labels=labels_to_drop).sort_values(ascending=False)
return au_corr[0:n]
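A small usage sketch for the two helpers above; the DataFrame is an arbitrary example, not project data:

import pandas as pd

# three columns of made-up values; 'b' is perfectly correlated with 'a'
df = pd.DataFrame({
    'a': [1, 2, 3, 4],
    'b': [2, 4, 6, 8],
    'c': [4, 1, 3, 2],
})
print(get_top_abs_correlations(df, 2))
# the ('a', 'b') pair is ranked first with an absolute correlation of 1.0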
......@@ -234,9 +234,9 @@
//fetch the video time landmark details
fetch('http://127.0.0.1:8000/get-lecture-video-summary-time-landmarks/?video_name=' + global_video_name)
.then((res) => res.json())
.then((out) => assignTimeLandmarks(out.response))
.catch((err) => alert('error: ' + err));
.then((res) => res.json())
.then((out) => assignTimeLandmarks(out.response))
.catch((err) => alert('error: ' + err));
//display the progress bar area
......@@ -251,7 +251,6 @@
}
//this function will handle the activity 'summary' button
$('#activity_summary_btn').click(function (e) {
......@@ -260,9 +259,9 @@
//fetch the activity summary details
fetch('http://127.0.0.1:8000/get-lecture-activity-summary/?video_name=' + global_video_name)
.then((res) => res.json())
.then((out) => activityFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
.then((res) => res.json())
.then((out) => activityFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
});
......@@ -275,9 +274,9 @@
//fetch the activity summary details
fetch('http://127.0.0.1:8000/get-lecture-emotion-summary/?video_name=' + global_video_name)
.then((res) => res.json())
.then((out) => emotionFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
.then((res) => res.json())
.then((out) => emotionFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
});
......@@ -288,13 +287,12 @@
//fetch the activity summary details
fetch('http://127.0.0.1:8000/get-lecture-gaze-summary/?video_name=' + global_video_name)
.then((res) => res.json())
.then((out) => gazeFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
.then((res) => res.json())
.then((out) => gazeFrameGroupPercentages(out, e))
.catch((err) => alert('error: ' + err));
});
//this function will handle the retrieved activity frame group percentages
function activityFrameGroupPercentages(response, e) {
......@@ -357,7 +355,6 @@
}
//this function will call the activity chart function
function renderActivityChart(activity_labels) {
......@@ -486,7 +483,6 @@
}
var chart = new CanvasJS.Chart("EmotionChartContainer", {
animationEnabled: true,
theme: "light2",
......@@ -570,7 +566,6 @@
}
var chart = new CanvasJS.Chart("GazeChartContainer", {
animationEnabled: true,
theme: "light2",
......@@ -609,7 +604,6 @@
}
//this function will render the chart for Activity statistics
function renderActivityStatistics() {
......@@ -626,7 +620,6 @@
];
for (let i = 0; i < label_length; i++) {
let label = activity_labels[i];
......@@ -634,7 +627,7 @@
for (let j = 0; j < activity_length; j++) {
let activity = individual_activities[j];
datapoints.push({label: "lecture " + (j+1), y: activity[label]});
datapoints.push({label: "lecture " + (j + 1), y: activity[label]});
}
......@@ -644,7 +637,7 @@
name: label,
markerType: "square",
{#xValueFormatString: "DD MMM, YYYY",#}
xValueFormatString: "lec " + (i+1),
xValueFormatString: "lec " + (i + 1),
color: getRandomColor(),
dataPoints: datapoints
};
......@@ -714,7 +707,7 @@
for (let j = 0; j < emotion_length; j++) {
let emotion = individual_emotions[j];
datapoints.push({label: "lecture " + (j+1), y: emotion[label]});
datapoints.push({label: "lecture " + (j + 1), y: emotion[label]});
}
let obj = {
......@@ -723,7 +716,7 @@
name: label,
markerType: "square",
{#xValueFormatString: "DD MMM, YYYY",#}
xValueFormatString: "Lec " + (i+1),
xValueFormatString: "Lec " + (i + 1),
color: colors[i - 1],
dataPoints: datapoints
};
......@@ -740,7 +733,7 @@
axisX: {
title: "Lecture",
{#valueFormatString: "DD MMM",#}
valueFormatString: "lec" ,
valueFormatString: "lec",
crosshair: {
enabled: true,
snapToDataPoint: true
......@@ -792,7 +785,7 @@
for (let j = 0; j < gaze_estimation_length; j++) {
let gaze_estimation = individual_gaze_estimations[j];
datapoints.push({label: "lecture " + (j+1), y: gaze_estimation[label]});
datapoints.push({label: "lecture " + (j + 1), y: gaze_estimation[label]});
}
let obj = {
......@@ -801,7 +794,7 @@
name: label,
markerType: "square",
{#xValueFormatString: "DD MMM, YYYY",#}
xValueFormatString: "Lec " + (i+1),
xValueFormatString: "Lec " + (i + 1),
color: colors[i - 1],
dataPoints: datapoints
};
......@@ -818,7 +811,7 @@
axisX: {
title: "Lecture",
{#valueFormatString: "DD MMM",#}
valueFormatString: "lec" ,
valueFormatString: "lec",
crosshair: {
enabled: true,
snapToDataPoint: true
......@@ -862,7 +855,7 @@
$('#student_behavior_view_summary_modal').modal();
});
});
//this function will handle the view summary option form
$('#view_summary_option_form').submit(function (e) {
......@@ -968,6 +961,21 @@
//this function will handle the advanced analysis for activity
$('#activity_advanced_btn').click(function () {
$('#activity_advanced_modal').modal();
//enable the loader
$('#activity_corr_loader').attr('hidden', false);
let lecturer = "{{ lecturer }}";
let option = $("input[name='option']:checked").val();
//fetch the correlation data
fetch('http://127.0.0.1:8000/get-activity-correlations/?lecturer=' + lecturer + '&option=' + option)
.then((res) => res.json())
.then((out) => displayActivityCorrelations(out.correlations))
.catch((err) => alert('error: ' + err));
});
......@@ -975,14 +983,210 @@
$('#emotion_advanced_btn').click(function () {
$('#emotion_advanced_modal').modal();
//enable the loader
$('#emotion_corr_loader').attr('hidden', false);
let lecturer = "{{ lecturer }}";
let option = $("input[name='option']:checked").val();
//fetch the correlation data
fetch('http://127.0.0.1:8000/get-emotion-correlations/?lecturer=' + lecturer + "&option=" + option)
.then((res) => res.json())
.then((out) => displayEmotionCorrelations(out.correlations))
.catch((err) => alert('err: ' + err));
});
//this function will handle the advanced analysis for gaze
$('#gaze_advanced_btn').click(function () {
$('#gaze_advanced_modal').modal();
//enable the loader
$('#gaze_corr_loader').attr('hidden', false);
let lecturer = "{{ lecturer }}";
let option = $("input[name='option']:checked").val();
//fetch the correlation data
fetch('http://127.0.0.1:8000/get-gaze-correlations/?lecturer=' + lecturer + "&option=" + option)
.then((res) => res.json())
.then((out) => displayGazeCorrelations(out.correlations))
.catch((err) => alert('err: ' + err));
});
//this method will display the activity correlations in a table
function displayActivityCorrelations(correlations) {
let htmlString = "";
//create the html content for the activity correlation table
for (let i = 0; i < correlations.length; i++) {
let corr = correlations[i];
let indices = corr.index;
let value = corr.value;
value = Math.round(value * 100);  //convert the correlation to a whole-number percentage
if (value <= 100 && value > 80) {
htmlString += "<tr class='bg-success text-white'>";
}
else if (value <= 80 && value > 60) {
htmlString += "<tr class='bg-primary text-white'>";
}
else if (value <= 60 && value > 40) {
htmlString += "<tr class='bg-warning text-white'>";
}
else if (value <= 40 && value > 20) {
htmlString += "<tr class='bg-danger text-white'>";
}
else if (value <= 20 && value > 0) {
htmlString += "<tr class='bg-dark text-white'>";
}
//create a <tr> to be inserted
htmlString += "<td>";
htmlString += indices[0];
htmlString += "</td>";
htmlString += "<td>";
htmlString += indices[1];
htmlString += "</td>";
htmlString += "<td>";
htmlString += value;
htmlString += "</td>";
htmlString += "</tr>";
}
//append to the <tbody>
$('#activity_corr_tbody').append(htmlString);
//hide the loader
$('#activity_corr_loader').hide();
//show the table
$('#activity_corr_table').attr('hidden', false);
}
//this method will display the emotion correlations in a table
function displayEmotionCorrelations(correlations) {
let htmlString = "";
//create the html content for the emotion correlation table
for (let i = 0; i < correlations.length; i++) {
let corr = correlations[i];
let indices = corr.index;
let value = corr.value;
value = Math.round(value * 100);  //convert the correlation to a whole-number percentage
if (value <= 100 && value > 80) {
htmlString += "<tr class='bg-success text-white'>";
}
else if (value <= 80 && value > 60) {
htmlString += "<tr class='bg-primary text-white'>";
}
else if (value <= 60 && value > 40) {
htmlString += "<tr class='bg-warning text-white'>";
}
else if (value <= 40 && value > 20) {
htmlString += "<tr class='bg-danger text-white'>";
}
else if (value <= 20 && value > 0) {
htmlString += "<tr class='bg-dark text-white'>";
}
//create a <tr> to be inserted
htmlString += "<td>";
htmlString += indices[0];
htmlString += "</td>";
htmlString += "<td>";
htmlString += indices[1];
htmlString += "</td>";
htmlString += "<td>";
htmlString += value;
htmlString += "</td>";
htmlString += "</tr>";
}
//append to the <tbody>
$('#emotion_corr_tbody').append(htmlString);
//hide the loader
$('#emotion_corr_loader').hide();
//show the table
$('#emotion_corr_table').attr('hidden', false);
}
//this method will display the gaze correlations in a table
function displayGazeCorrelations(correlations) {
let htmlString = "";
//create the html content for the gaze correlation table
for (let i = 0; i < correlations.length; i++) {
let corr = correlations[i];
let indices = corr.index;
let value = corr.value;
value = Math.round(value * 100);  //convert the correlation to a whole-number percentage
if (value <= 100 && value > 80) {
htmlString += "<tr class='bg-success text-white'>";
}
else if (value <= 80 && value > 60) {
htmlString += "<tr class='bg-primary text-white'>";
}
else if (value <= 60 && value > 40) {
htmlString += "<tr class='bg-warning text-white'>";
}
else if (value <= 40 && value > 20) {
htmlString += "<tr class='bg-danger text-white'>";
}
else if (value <= 20 && value > 0) {
htmlString += "<tr class='bg-dark text-white'>";
}
//create a <tr> to be inserted
htmlString += "<td>";
htmlString += indices[0];
htmlString += "</td>";
htmlString += "<td>";
htmlString += indices[1];
htmlString += "</td>";
htmlString += "<td>";
htmlString += value;
htmlString += "</td>";
htmlString += "</tr>";
}
//append to the <tbody>
$('#gaze_corr_tbody').append(htmlString);
//hide the loader
$('#gaze_corr_loader').hide();
//show the table
$('#gaze_corr_table').attr('hidden', false);
}
});
</script>
......@@ -1207,6 +1411,13 @@
</button>
</div>
<!-- end of stats button -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2"
id="activity_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</div>
</div>
<!-- end of Activity card -->
......@@ -1282,6 +1493,14 @@
</button>
</div>
<!-- end of stats button -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2"
id="emotion_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</div>
</div>
......@@ -1351,6 +1570,14 @@
</button>
</div>
<!-- end of stats button -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2"
id="gaze_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</div>
</div>
......@@ -1418,18 +1645,13 @@
<hr>
<!-- button to view activity summary -->
<button type="button" class="btn btn-primary float-right" id="activity_summary_btn">
<button type="button" class="btn btn-primary float-right"
id="activity_summary_btn">
Summary
</button>
<!-- end of button to view activity summary -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2" id="activity_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</li>
<!-- end of the activity list item -->
......@@ -1495,19 +1717,13 @@
<hr>
<!-- button to view emotion summary -->
<button type="button" class="btn btn-primary float-right" id="emotion_summary_btn">
<button type="button" class="btn btn-primary float-right"
id="emotion_summary_btn">
Summary
</button>
<!-- end of button to view emotion summary -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2" id="emotion_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</li>
<!-- end of the emotion list item -->
......@@ -1548,7 +1764,8 @@
<span class="float-right" id="looking_down_right_perct">50%</span>
<div class="progress mb-4">
<div class="progress-bar bg-success" role="progressbar" id="looking_down_right_width"
<div class="progress-bar bg-success" role="progressbar"
id="looking_down_right_width"
style="width: 60%"
aria-valuenow="60" aria-valuemin="0" aria-valuemax="100"></div>
</div>
......@@ -1579,24 +1796,17 @@
<!-- button to view gaze summary -->
<button type="button" class="btn btn-primary float-right" id="gaze_summary_btn">
<button type="button" class="btn btn-primary float-right"
id="gaze_summary_btn">
Summary
</button>
<!-- end of button to view gaze summary -->
<!-- button to view advanced analysis -->
<button type="button" class="btn btn-danger float-right mr-2" id="gaze_advanced_btn">
Advanced Analysis
</button>
<!-- end of button to view advanced analysis -->
</li>
<!-- end of the gaze list item -->
</ul>
......@@ -1926,7 +2136,8 @@
</div>
<div class="custom-control custom-radio mt-2">
<input type="radio" class="custom-control-input" id="customRadio3" name="option" value="10000">
<input type="radio" class="custom-control-input" id="customRadio3" name="option"
value="10000">
<label class="custom-control-label" for="customRadio3">All</label>
</div>
......@@ -1997,7 +2208,8 @@
<!-- gaze estimation Modal-->
<div class="modal fade" id="gaze_estimation_stats_modal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"
<div class="modal fade" id="gaze_estimation_stats_modal" tabindex="-1" role="dialog"
aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div class="modal-dialog" role="document" style="max-width: 1400px">
<div class="modal-content">
......@@ -2022,7 +2234,7 @@
<!-- activity advanced analysis modal -->
<div class="modal fade" id="activity_advanced_modal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div class="modal-dialog" role="document" style="max-width: 1400px">
<div class="modal-dialog" role="document" style="max-width: 700px">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLabel">Activity Advanced Analysis</h5>
......@@ -2031,7 +2243,29 @@
</button>
</div>
<div class="modal-body text-center">
<p>Hello</p>
<h3 class="font-weight-bold">Student Activity VS. Lecturer Activity</h3>
<!-- ajax loader -->
<div class="text-center" id="activity_corr_loader" hidden>
<img src="{% static 'FirstApp/images/ajax-loader.gif' %}" alt="Loader">
</div>
<!-- correlation table -->
<table class="table table-striped" id="activity_corr_table" hidden>
<thead>
<tr>
<th>Student Activity</th>
<th>Lecturer Activity</th>
<th>Correlation Score</th>
</tr>
</thead>
<tbody id="activity_corr_tbody">
</tbody>
</table>
<!-- end of correlation table -->
</div>
<div class="modal-footer">
<button class="btn btn-secondary" type="button" data-dismiss="modal">Cancel</button>
......@@ -2044,7 +2278,7 @@
<!-- emotion advanced analysis modal -->
<div class="modal fade" id="emotion_advanced_modal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div class="modal-dialog" role="document" style="max-width: 1400px">
<div class="modal-dialog" role="document" style="max-width: 700px">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLabel">Emotion Advanced Analysis</h5>
......@@ -2053,7 +2287,29 @@
</button>
</div>
<div class="modal-body text-center">
<p>Hello</p>
<h3 class="font-weight-bold">Student Emotions VS. Lecturer Activity</h3>
<!-- ajax loader -->
<div class="text-center" id="emotion_corr_loader" hidden>
<img src="{% static 'FirstApp/images/ajax-loader.gif' %}" alt="Loader">
</div>
<!-- correlation table -->
<table class="table table-striped" id="emotion_corr_table" hidden>
<thead>
<tr>
<th>Student Emotion</th>
<th>Lecturer Activity</th>
<th>Correlation Score</th>
</tr>
</thead>
<tbody id="emotion_corr_tbody">
</tbody>
</table>
<!-- end of correlation table -->
</div>
<div class="modal-footer">
<button class="btn btn-secondary" type="button" data-dismiss="modal">Cancel</button>
......@@ -2066,7 +2322,7 @@
<!-- gaze advanced analysis modal -->
<div class="modal fade" id="gaze_advanced_modal" tabindex="-1" role="dialog" aria-labelledby="exampleModalLabel"
aria-hidden="true">
<div class="modal-dialog" role="document" style="max-width: 1400px">
<div class="modal-dialog" role="document" style="max-width: 700px">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLabel">Gaze Advanced Analysis</h5>
......@@ -2075,7 +2331,29 @@
</button>
</div>
<div class="modal-body text-center">
<p>Hello</p>
<h3 class="font-weight-bold">Student Gaze estimation VS. Lecturer Activity</h3>
<!-- ajax loader -->
<div class="text-center" id="gaze_corr_loader" hidden>
<img src="{% static 'FirstApp/images/ajax-loader.gif' %}" alt="Loader">
</div>
<!-- correlation table -->
<table class="table table-striped" id="gaze_corr_table" hidden>
<thead>
<tr>
<th>Student Gaze Estimation</th>
<th>Lecturer Activity</th>
<th>Correlation Score</th>
</tr>
</thead>
<tbody id="gaze_corr_tbody">
</tbody>
</table>
<!-- end of correlation table -->
</div>
<div class="modal-footer">
<button class="btn btn-secondary" type="button" data-dismiss="modal">Cancel</button>
......
......@@ -243,21 +243,19 @@
//define the student video src
let video_src = "{% static '' %}FirstApp/videos/" + global_video_name;
{#global_lecturer_video_name = "Test_1.mp4";#}
{#global_lecturer_video_name = "Test_2.mp4";#}
global_lecturer_video_name = "Test_3.mp4";
//define the lecturer video src
let lecturer_video_src = "{% static '' %}FirstApp/lecturer_videos/" + global_lecturer_video_name;
//assign the video src
$('#student_video').attr('src', video_src);
//assign the video src
$('#lecturer_video').attr('src', lecturer_video_src);
//fetch the lecture recorded video name
fetch('http://127.0.0.1:8000/get-lecture-recorded-video-name/?lecturer=' + global_lecturer + '&subject=' + global_subject + '&date=' + global_lecture_date)
.then((res) => res.json())
.then((out) => assignLecturerRecordedVideoName(out))
.catch((err) => alert('error: ' + err));
{#global_lecturer_video_name = "Test_1.mp4";#}
{#global_lecturer_video_name = "Test_2.mp4";#}
{#global_lecturer_video_name = "Test_3.mp4";#}
$('#integrate_modal').modal();
//fetch data from the API
......@@ -268,6 +266,21 @@
});
//assign the lecturer recorded video name
function assignLecturerRecordedVideoName(res) {
global_lecturer_video_name = res.video_name;
//define the lecturer video src
let lecturer_video_src = "{% static '' %}FirstApp/lecturer_videos/" + global_lecturer_video_name;
//assign the video src
$('#lecturer_video').attr('src', lecturer_video_src);
$('#integrate_modal').modal();
}
//this function will load the activity recognition for frames
function displayActivityRecognitionForFrame(response) {
//hide the loader
......@@ -527,11 +540,11 @@
{% load static %}
<!-- Page Heading -->
{# <div class="d-sm-flex align-items-center justify-content-between mb-4">#}
{# <h1 class="h3 mb-0 text-gray-800">Student Activity Recognition</h1>#}
{# <button type="button" data-target="#generateReportModal" data-toggle="modal" class="d-none d-sm-inline-block btn btn-sm btn-primary shadow-sm" id="generate_report_before" disabled><i#}
{# class="fas fa-download fa-sm text-white-50"></i> Generate Report</button>#}
{# </div>#}
<div class="d-sm-flex align-items-center justify-content-between mb-4">
<h1 class="h3 mb-0 text-gray-800">Student Activity Recognition</h1>
{# <button type="button" data-target="#generateReportModal" data-toggle="modal" class="d-none d-sm-inline-block btn btn-sm btn-primary shadow-sm" id="generate_report_before" disabled><i#}
{# class="fas fa-download fa-sm text-white-50"></i> Generate Report</button>#}
</div>
<!--first row -->
......
......@@ -203,6 +203,21 @@ urlpatterns = [
# retrieves lecture gaze summary
url(r'^get-lecture-gaze-summary/$', api.GetLectureGazeSummary.as_view()),
# retrieves lecture activity correlations
url(r'^get-activity-correlations/$', api.GetLectureActivityCorrelations.as_view()),
# retrieves lecture emotion correlations
url(r'^get-emotion-correlations/$', api.GetLectureEmotionCorrelations.as_view()),
# retrieves lecture gaze correlations
url(r'^get-gaze-correlations/$', api.GetLectureGazeCorrelations.as_view()),
##### OTHERS #####
# retrieves the lecturer recorded video name
url(r'^get-lecture-recorded-video-name/$', api.GetLecturerRecordedVideo.as_view()),
# routers
# path('', include(router.urls)),
......