Commit a03e6d94 authored by I.K Seneviratne

Committing the partial implementation of the lecture video summary time landmarks in Lecturer Home Page
parent 7bcf0851
@@ -12,6 +12,7 @@ from . import emotion_detector as ed
from .logic import id_generator as ig
from .logic import pdf_file_generator as pdf
from .logic import head_gaze_estimation as hge
from .logic import video_extraction as ve
from .models import Teachers, Video, VideoMeta, RegisterUser
from .MongoModels import *
from .serializers import *
@@ -736,3 +737,15 @@ class GetStudentBehaviorSummaryForPeriod(APIView):
"gaze_estimation_labels": gaze_estimation_labels,
"isRecordFound": isRecordFound
})
# this API will retrieve the lecture video summary time landmarks
class GetLectureVideoSummaryTimeLandmarks(APIView):

    def get(self, request):
        video_name = request.query_params.get('video_name')
        time_landmarks = ve.getTimeLandmarks(video_name)

        return Response({
            "response": time_landmarks
        })
\ No newline at end of file
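A quick way to exercise the new endpoint, sketched under the assumption that the Django dev server runs at http://127.0.0.1:8000 (the host the Lecturer Home Page fetch below already targets); the video file name here is a placeholder, not a file from the repository:

import requests

# hypothetical smoke test for the new time-landmarks endpoint
resp = requests.get(
    "http://127.0.0.1:8000/get-lecture-video-summary-time-landmarks/",
    params={"video_name": "Lecture01.mp4"},   # placeholder video name
)
print(resp.json()["response"])  # e.g. ['0:00:00', '0:00:20', ..., '0:01:45']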
import os
import cv2
import shutil
import datetime
def VideoExtractor(request):
@@ -68,3 +69,50 @@ def getExtractedFrames(request):
    else:
        return "No extracted frames were found"
# this method will retrieve the time landmarks for a lecture video
def getTimeLandmarks(video_name):
    BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    # build the video path in an OS-independent way
    VIDEO_PATH = os.path.join(BASE_DIR, "assets", "FirstApp", "videos", video_name)

    # read the frame count and frame rate of the video
    video = cv2.VideoCapture(VIDEO_PATH)
    no_of_frames = video.get(cv2.CAP_PROP_FRAME_COUNT)
    fps = int(video.get(cv2.CAP_PROP_FPS))
    video.release()

    frame_count = 0

    # calculating the duration in seconds
    duration = int(no_of_frames / fps)

    # define the number of time gaps required
    THRESHOLD_GAP = 5

    # calculating the real duration
    real_duration = datetime.timedelta(seconds=(duration + THRESHOLD_GAP))

    # defines the number of seconds included for a frame group
    THRESHOLD_TIME = 10

    # define a unit gap
    unit_gap = int(duration / THRESHOLD_GAP)

    initial_landmark = 0
    time_landmarks = ['0:00:00']
    time_landmarks_values = [0]

    # loop through the threshold gap limit to define the time landmarks
    for i in range(THRESHOLD_GAP):
        initial_landmark += unit_gap
        time_landmark = str(datetime.timedelta(seconds=initial_landmark))
        time_landmark_value = initial_landmark
        time_landmarks.append(time_landmark)
        time_landmarks_values.append(time_landmark_value)

    # append the final time
    time_landmarks.append(str(real_duration))
    time_landmarks_values.append(duration)

    return time_landmarks
\ No newline at end of file
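For reference, a small worked example of what getTimeLandmarks returns; the 100-second duration is an assumed figure chosen only to make the arithmetic easy to follow:

# assumed: duration = 100 s, so with THRESHOLD_GAP = 5 the unit_gap is 100 // 5 = 20
# and real_duration = timedelta(seconds=105) -> '0:01:45'
expected_landmarks = [
    '0:00:00',                                              # initial landmark
    '0:00:20', '0:00:40', '0:01:00', '0:01:20', '0:01:40',  # one label per unit gap
    '0:01:45',                                              # duration + THRESHOLD_GAP as the final label
]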
@@ -35,6 +35,7 @@
var global_lecture_video_id = '';
var global_video_name = '';
var student_behavior_summary = {};
var lecture_video_time_landmarks = [];
$(document).ready(function () {
@@ -223,11 +224,26 @@
$('#looking_front_width').width(act.looking_front_perct + '%');
});
//fetch the video time landmark details
fetch('http://127.0.0.1:8000/get-lecture-video-summary-time-landmarks/?video_name=' + global_video_name)
.then((res) => res.json())
.then((out) => assignTimeLandmarks(out.response))
.catch((err) => alert('error: ' + err));
//display the progress bar area
$('#progress_areas').attr('hidden', false);
}
//this function will assign lecture video time landmarks
function assignTimeLandmarks(response) {
lecture_video_time_landmarks = response;
}
//this function will handle the 'summary' button
$('#summary_btn').click(function () {
@@ -245,6 +261,15 @@
//this function will call the chart function
function renderChart() {
let datapoints = [];
//iterate through the time landmarks and build the chart data points
for (let i = 0; i < lecture_video_time_landmarks.length; i++) {
//random y-values stand in for the real summary data in this partial implementation
let point = {label: lecture_video_time_landmarks[i], y: Math.round(Math.random() * 1000)};
datapoints.push(point);
}
var chart = new CanvasJS.Chart("chartContainer", {
animationEnabled: true,
theme: "light2",
@@ -285,67 +310,8 @@
{#xValueFormatString: "DD MMM, YYYY",#}
xValueFormatString: "hh:mm:ss",
color: "#000000",
dataPoints: [
{x: new Date(2017, 0, 3, 0, 2, 0), y: 650},
{x: new Date(2017, 0, 4, 0, 4, 0), y: 700},
{x: new Date(2017, 0, 5, 0, 6, 0), y: 710},
{x: new Date(2017, 0, 6, 0, 8, 0), y: 658},
{x: new Date(2017, 0, 7, 0, 10, 0), y: 734},
{x: new Date(2017, 0, 8, 0, 12, 0), y: 963},
{x: new Date(2017, 0, 9, 0, 14, 0), y: 847},
{x: new Date(2017, 0, 10, 0, 16, 0), y: 853},
{x: new Date(2017, 0, 11, 0, 18, 0), y: 869},
{x: new Date(2017, 0, 12, 0, 20, 0), y: 943},
{x: new Date(2017, 0, 13, 0, 22, 0), y: 970},
{x: new Date(2017, 0, 14, 0, 24, 0), y: 869},
{x: new Date(2017, 0, 15, 0, 26, 0), y: 890},
{x: new Date(2017, 0, 16, 0, 28, 0), y: 930}
]
},
{
type: "line",
showInLegend: true,
name: "Emotion",
lineDashType: "dash",
dataPoints: [
{x: new Date(2017, 0, 3, 0, 2, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 4, 0, 4, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 5, 0, 6, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 6, 0, 8, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 7, 0, 10, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 8, 0, 12, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 9, 0, 14, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 10, 0, 16, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 11, 0, 18, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 12, 0, 20, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 13, 0, 22, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 14, 0, 24, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 15, 0, 26, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 16, 0, 28, 0), y: Number(Math.round(Math.random() * 1000), 0)}
]
},
{
type: "line",
showInLegend: true,
name: "Gaze",
lineDashType: "dash",
dataPoints: [
{x: new Date(2017, 0, 3, 0, 2, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 4, 0, 4, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 5, 0, 6, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 6, 0, 8, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 7, 0, 10, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 8, 0, 12, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 9, 0, 14, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 10, 0, 16, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 11, 0, 18, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 12, 0, 20, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 13, 0, 22, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 14, 0, 24, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 15, 0, 26, 0), y: Number(Math.round(Math.random() * 1000), 0)},
{x: new Date(2017, 0, 16, 0, 28, 0), y: Number(Math.round(Math.random() * 1000), 0)}
]
}]
dataPoints: datapoints
}]
});
chart.render();
}
...
@@ -164,12 +164,15 @@ urlpatterns = [
url(r'^get-lecture-gaze-estimation-for-frame/$', api.GetLectureGazeEstimationForFrames.as_view()),
#####===== DATA VISUALIZATION =====#####
##### VIEW STUDENT BEHAVIOR SUMMARY SECTION #####
# retrieves the student behavior summary for a specified time period
url(r'^get-student-behavior-summary-for-period/$', api.GetStudentBehaviorSummaryForPeriod.as_view()),
# retrieves the lecture video summary time landmarks
url(r'^get-lecture-video-summary-time-landmarks/$', api.GetLectureVideoSummaryTimeLandmarks.as_view()),
# routers
# path('', include(router.urls)),
...