Commit 7af15089 authored by Dakshina Weerasiri

Facial expressions

parent c88ebc61
......@@ -24,3 +24,33 @@ const FIREBASE_CONFIG = {
const app = initializeApp(FIREBASE_CONFIG);
export const fileStorage = getStorage(app);
// Sample facial-expression score frames, one object per analyzed video frame,
// keyed by emotion class (angry/disgust/fear/happy/sad/surprise/neutral).
// NOTE(review): several values exceed 1.0 (e.g. 1.4, 2.28) — presumably raw
// model outputs or placeholder data for the emotions chart; confirm they are
// not meant to be probabilities.
export const emotionsData = [
{
angry: 0.01,
disgust: 0.0,
fear: 0.2,
happy: 0.18,
sad: 0.02,
surprise: 0.4,
neutral: 0.19,
},
{
angry: 0.02,
disgust: 0.1,
fear: 0.3,
happy: 0.28,
sad: 0.03,
surprise: 1.4,
neutral: 0.29,
},
{
angry: 1.02,
disgust: 0.2,
fear: 1.3,
happy: 2.28,
sad: 0.3,
surprise: 2.4,
neutral: 0.5,
},
];
......@@ -43,7 +43,7 @@ export const request = (
data,
params,
headers,
timeout: 30000,
timeout: 90000,
})
.then(async (response: AxiosResponse) => {
logger("RESPONSE: ", response);
......
......@@ -66,6 +66,7 @@ export const getStatusColor = (status?: ApplicationType["status"]) => {
};
// Formats an address as "addressLine, city, country"; returns the empty
// string when no address is provided.
export const getAddress = (address: AddressType) => {
  return address
    ? `${address.addressLine}, ${address.city}, ${address.country}`
    : "";
};
......
......@@ -57,6 +57,12 @@ function* applyForJob({
candidate: profile._id,
job: payload,
status: "Pending",
interview: {
date: "",
time: "",
link: "",
voiceVerification: "Pending",
},
},
resumeUrl: profile.resume,
}
......
......@@ -147,9 +147,30 @@ export type ApplicationType = {
time: string;
link: string;
videoRef?: string;
voiceVerification?: string;
};
};
// Per-frame emotion scores from the facial-analysis service, one numeric
// score per emotion class.
// NOTE(review): scores are not guaranteed to lie in [0, 1] — the sample
// emotionsData in this repo contains values > 1; confirm the scale.
export type EmotionsPayloadType = {
angry: number;
disgust: number;
fear: number;
happy: number;
sad: number;
surprise: number;
neutral: number;
};
// One websocket message from the facial-expression stream.
// `end: true` marks the final message; otherwise `data` carries one entry per
// detected face (bounding box plus per-emotion scores).
export type EmotionsType = {
  // NOTE(review): previously declared as a single-element tuple, but the
  // consumer reads `data[0]?.emotions` (optional access), so an
  // arbitrary-length array — possibly empty when no face is detected — is
  // the accurate shape.
  data: {
    box: number[];
    emotions: EmotionsPayloadType;
  }[];
  end: boolean;
};
//PAYLOADS
export type SignUpPayload = {
passwords: string[];
......@@ -201,7 +222,7 @@ export type ApplicationPayloadType = {
time: string;
link: string;
videoRef?: string;
voiceVerification?: number;
voiceVerification?: string;
};
score: {
primary: number;
......
import React, { useState, useRef, ChangeEvent } from "react";
import React, { useState, useRef, ChangeEvent, useEffect } from "react";
import {
AreaChart,
Tooltip,
Area,
ResponsiveContainer,
XAxis,
YAxis,
} from "recharts";
import { getDownloadURL, ref, uploadBytesResumable } from "firebase/storage";
import moment from "moment";
import ReactPlayer from "react-player";
import { OnProgressProps } from "react-player/base";
import CommonAPI from "../../common/apis/common";
import { ApplicationPayloadType } from "../../common/types";
import {
ApplicationPayloadType,
EmotionsPayloadType,
EmotionsType,
} from "../../common/types";
import Progress from "../Progress";
import { fileStorage } from "../../common/config";
import { useDispatch } from "react-redux";
......@@ -16,7 +28,8 @@ type OwnProps = {
};
// Results of the interview analyses shown in this component; each field is
// populated once its corresponding analysis completes.
// (The diff residue here declared `voice` twice — once required, once
// optional; the optional post-change shape is the valid one.)
type Analysis = {
  voice?: string;
  eyeBlinks?: number; // blinks per minute
};
const Interview = ({ application }: OwnProps) => {
......@@ -26,10 +39,40 @@ const Interview = ({ application }: OwnProps) => {
const [startTime, setStartTime] = useState<number>(0);
const [progress, setProgress] = useState<number>(0);
const [endTime, setEndTime] = useState<number>(120);
const [emotions, setEmotions] = useState<EmotionsPayloadType[]>([]);
const [paddingLeft, setPaddingLeft] = useState<number>(0);
const [isFacialAnalyzing, setIsFacialAnalyzing] = useState<boolean>(false);
const _selectedTime = useRef<number>(0);
const _video = useRef<ReactPlayer>(null);
const _hiddenFileInput = useRef<HTMLInputElement>(null);
// Opens a websocket to the facial-analysis service and appends one emotion
// frame to `emotions` per message until the server sends `end: true`.
const onPressStartFacialExpression = () => {
  setIsFacialAnalyzing(true);
  const url = `ws://127.0.0.1:8000/facial/ws/emotions?application_id=${application._id}`;
  const ws = new WebSocket(url);
  // Fallback scores for frames in which no face was detected.
  const noFace: EmotionsPayloadType = {
    angry: 0,
    disgust: 0,
    fear: 0,
    happy: 0,
    sad: 0,
    surprise: 0,
    neutral: 0,
  };
  ws.onmessage = (e) => {
    const socketData = JSON.parse(e.data) as EmotionsType;
    if (socketData.end) {
      setIsFacialAnalyzing(false);
      ws.close();
    } else {
      const emotionData: EmotionsPayloadType =
        socketData.data[0]?.emotions || noFace;
      setEmotions((old) => [...old, emotionData]);
      // Shift the chart's left padding so new frames scroll into view.
      setPaddingLeft((_old) => _old - 40);
    }
  };
  // Reset the UI if the socket fails before the "end" frame arrives.
  // (Previously a cleanup closure was returned from this click handler, but
  // onClick return values are ignored, so it was dead code.)
  ws.onerror = () => {
    setIsFacialAnalyzing(false);
    ws.close();
  };
};
// Remember the playhead position the user seeked to.
const onSeek = (seconds: number) => {
  _selectedTime.current = seconds;
};
// Keep the selected-time ref in sync with ReactPlayer's playback progress.
const onProgress = (progressEvent: OnProgressProps) => {
  _selectedTime.current = progressEvent.playedSeconds;
};
......@@ -39,7 +82,7 @@ const Interview = ({ application }: OwnProps) => {
const displayStartTime = `${moment.utc(startTime * 1000).format("HH:mm:ss")}`;
const displayEndTime = `${moment.utc(endTime * 1000).format("HH:mm:ss")}`;
const onClickAnalyse = () => {
const onClickAnalyseVoice = () => {
if (startTime !== endTime) {
setIsLoading(true);
CommonAPI.analyseInterview({
......@@ -56,6 +99,34 @@ const Interview = ({ application }: OwnProps) => {
});
}
};
// Streams eye-blink detection for the selected [startTime, endTime] window
// and stores the blinks-per-minute ratio in `analysis.eyeBlinks`.
const onClickAnalyseBlinks = () => {
  if (startTime !== endTime) {
    setIsLoading(true);
    const url = `ws://127.0.0.1:8000/facial/ws/eye-blinks?application_id=${
      application._id
    }&start=${Math.round(startTime)}&end=${Math.round(endTime)}`;
    const ws = new WebSocket(url);
    ws.onmessage = (e) => {
      // Final frame carries the total blink count; typed instead of `as any`.
      const socketData = JSON.parse(e.data) as { end: boolean; count: number };
      if (socketData.end) {
        const mins = (Math.round(endTime) - Math.round(startTime)) / 60;
        // Guard: rounding can collapse the window to zero minutes, which
        // previously produced an Infinity ratio.
        const ratio = mins > 0 ? Math.ceil(socketData.count / mins) : 0;
        if (analysis) {
          setAnalysis({ ...analysis, eyeBlinks: ratio });
        } else {
          setAnalysis({ eyeBlinks: ratio });
        }
        setIsLoading(false);
        ws.close();
      }
    };
    // Stop the spinner if the socket fails before the final frame.
    // (The cleanup closure previously returned from this click handler was
    // never invoked by anything — onClick return values are ignored.)
    ws.onerror = () => {
      setIsLoading(false);
      ws.close();
    };
  }
};
const onSelectVideo = (e: ChangeEvent<HTMLInputElement>) => {
if (e.target.files) {
setIsLoading(true);
......@@ -127,15 +198,18 @@ const Interview = ({ application }: OwnProps) => {
const renderVoiceVerification = () => {
if (application.interview?.voiceVerification) {
const score = application?.interview?.voiceVerification * 100;
if (application.interview.voiceVerification === "Pending") {
return (
<strong className="text-primary">Voice verification: Pending</strong>
);
}
const verification = Number(application?.interview?.voiceVerification);
const score = verification * 100;
const percentage = `${score?.toFixed(2)}%`;
return (
<strong
className={getVerificationColor(
application?.interview?.voiceVerification * 100
)}
>
<strong className={getVerificationColor(verification * 100)}>
Voice verification: {percentage}
</strong>
);
......@@ -157,7 +231,7 @@ const Interview = ({ application }: OwnProps) => {
{renderVoiceVerification()}
<div className="row" style={{ padding: "12px" }}>
<div className="col-sm-4">
<div className="col-sm-6">
<div className="row">
<button
type="button"
......@@ -177,7 +251,7 @@ const Interview = ({ application }: OwnProps) => {
</div>
</div>
</div>
<div className="col-sm-4">
<div className="col-sm-6">
<div className="row">
<button
type="button"
......@@ -197,31 +271,144 @@ const Interview = ({ application }: OwnProps) => {
</div>
</div>
</div>
<div className="col-sm-4">
<div className="row">
<div className="col-sm-8">
<button
type="button"
className="btn btn-primary w-100"
onClick={onClickAnalyse}
disabled={isLoading}
>
Analyse
</button>
</div>
{renderLoading}
</div>
</div>
</div>
<div className="row mt-2">
<div className="mb-3 row">
<label className="col-sm-2 col-form-label">Voice</label>
<label className="col-sm-5 col-form-label">
<strong>{analysis?.voice}</strong>
<div className="col-sm-3">
<label className="col-form-label">
<strong>Vocal expression</strong>
</label>
<div className="row">
<div className="col-sm-8">
<button
type="button"
className="btn btn-primary w-100 btn-sm"
onClick={onClickAnalyseVoice}
disabled={isLoading || isFacialAnalyzing}
>
Analyse
</button>
</div>
{renderLoading}
</div>
</div>
<label className="col-sm-3 col-form-label">{analysis?.voice}</label>
<div className="col-sm-3 ">
<label className="col-form-label">
<strong>Eye blink ratio</strong>
</label>
<div className="row">
<div className="col-sm-8">
<button
type="button"
className="btn btn-primary w-100 btn-sm"
onClick={onClickAnalyseBlinks}
disabled={isLoading || isFacialAnalyzing}
>
Analyse
</button>
</div>
{renderLoading}
</div>
</div>
<label className="col-sm-3 col-form-label">
<strong>{analysis?.eyeBlinks} (blinks per min)</strong>
</label>
</div>
</div>
<label className="col-sm-2 col-form-label">
<strong>Facial expressions</strong>
</label>
{emotions.length === 0 ? (
<div className="upload-interview-video">
<button
type="button"
className="btn btn-secondary"
onClick={onPressStartFacialExpression}
disabled={isLoading}
>
Start analysis
</button>
</div>
) : (
<ResponsiveContainer
height={280}
width="100%"
key={`rc_${emotions.length}`}
>
<AreaChart data={emotions} key={`ac_${emotions.length}`}>
<XAxis dataKey="Name" padding={{ left: paddingLeft }} />
<YAxis />
<Tooltip isAnimationActive={false} />
<Area
type="monotone"
dataKey="angry"
stroke="red"
fillOpacity={0.2}
fill="red"
key={`angry_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="disgust"
stroke="blue"
fillOpacity={0.2}
fill="blue"
key={`disgust_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="fear"
stroke="black"
fillOpacity={0.2}
fill="black"
key={`fear_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="happy"
stroke="yellow"
fillOpacity={0.2}
fill="yellow"
key={`happy_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="sad"
stroke="gray"
fillOpacity={0.2}
fill="gray"
key={`gray_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="surprise"
stroke="purple"
fillOpacity={0.2}
fill="purple"
key={`purple_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="neutral"
stroke="brown"
fillOpacity={0.2}
fill="brown"
key={`neutral_${emotions.length}`}
isAnimationActive={false}
/>
</AreaChart>
</ResponsiveContainer>
)}
</div>
);
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment