Commit 2ca478e8 authored by Birahavi Kugathasan

Generate interview questions from OpenAI

parent e9e446b1
@@ -13,6 +13,7 @@
    "axios": "^1.3.4",
    "firebase": "^9.17.2",
    "moment": "^2.29.4",
    "openai": "^3.2.1",
    "react": "^18.2.0",
    "react-dom": "^18.2.0",
    "react-easy-crop": "^4.7.4",
@@ -54,3 +54,5 @@ export const emotionsData = [
    neutral: 0.5,
  },
];

export const OPEN_API_KEY = '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';
import { Configuration, OpenAIApi } from "openai";
import { OPEN_API_KEY } from "../config";

// OpenAI client configured with the project key exported from common/config.
const configuration = new Configuration({
  organization: "org-AoUQNrvNFgE7OsNnQWErMvxy",
  apiKey: OPEN_API_KEY,
});
const openai = new OpenAIApi(configuration);

export default class OpenAPI {
  // Asks the text-davinci-003 completion endpoint for a list of interview
  // questions (with answers) on the given subject at the given difficulty level.
  static getInterViewQuestions = (subject: string, level: string) =>
    openai.createCompletion({
      model: "text-davinci-003",
      prompt: `Create a list of ${level} questions with answers for my interview regarding ${subject}`,
      temperature: 0.5,
      max_tokens: 150,
      top_p: 1.0,
      frequency_penalty: 0.0,
      presence_penalty: 0.0,
    });
}
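For reference, a minimal sketch of how this wrapper is consumed (the import path and the "react hooks" / "easy" arguments are illustrative; the real call sits in the Interview component further down). The v3 openai client resolves with an Axios-style response, so the generated text is read from res.data.choices[n].text:

// Illustrative path; the app imports it from common/lib/openApi.
import OpenAPI from "./openApi";

OpenAPI.getInterViewQuestions("react hooks", "easy").then((res) => {
  // Each choice carries the model's text; the UI renders it with pre-line whitespace.
  res.data.choices.forEach((choice) => console.log(choice.text));
});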
@@ -228,3 +228,9 @@
    display: none;
  }
}

.questions-container {
  background-color: #ffefab80;
  min-height: 400px;
  padding: 0 20px;
}
import React, { useState, useRef, ChangeEvent, useEffect } from "react";
import {
AreaChart,
Tooltip,
Area,
ResponsiveContainer,
XAxis,
YAxis,
} from "recharts";
import { getDownloadURL, ref, uploadBytesResumable } from "firebase/storage";
import moment from "moment";
import ReactPlayer from "react-player";
import { OnProgressProps } from "react-player/base";
import CommonAPI from "../../common/apis/common";
import {
ApplicationPayloadType,
EmotionsPayloadType,
EmotionsType,
} from "../../common/types";
import Progress from "../Progress";
import { fileStorage } from "../../common/config";
import { useDispatch } from "react-redux";
import { getJobs, updateApplication } from "../../common/actions/common";
import { getVerificationColor } from "../../common/lib/util";
type OwnProps = {
application: ApplicationPayloadType;
};
type Analysis = {
voice?: string;
eyeBlinks?: number;
};
const Analyse = ({ application }: OwnProps) => {
const dispatch = useDispatch();
const [analysis, setAnalysis] = useState<Analysis | null>(null);
const [isLoading, setIsLoading] = useState<boolean>(false);
const [startTime, setStartTime] = useState<number>(0);
const [progress, setProgress] = useState<number>(0);
const [endTime, setEndTime] = useState<number>(120);
const [emotions, setEmotions] = useState<EmotionsPayloadType[]>([]);
const [paddingLeft, setPaddingLeft] = useState<number>(0);
const [isFacialAnalyzing, setIsFacialAnalyzing] = useState<boolean>(false);
const _selectedTime = useRef<number>(0);
const _video = useRef<ReactPlayer>(null);
const _hiddenFileInput = useRef<HTMLInputElement>(null);
useEffect(() => {
dispatch(getJobs());
}, []);
const onPressStartFacialExpression = () => {
setIsFacialAnalyzing(true);
const url = `ws://127.0.0.1:8000/facial/ws/emotions?application_id=${application._id}`;
const ws = new WebSocket(url);
ws.onmessage = (e) => {
const socketData = JSON.parse(e.data) as EmotionsType;
if (socketData.end) {
setIsFacialAnalyzing(false);
ws.close();
} else {
const emotionData: EmotionsPayloadType = socketData.data[0]
?.emotions || {
angry: 0,
disgust: 0,
fear: 0,
happy: 0,
sad: 0,
surprise: 0,
neutral: 0,
};
setEmotions((old) => [...old, emotionData]);
setPaddingLeft((_old) => _old - 40);
}
};
return () => ws.close();
};
const onSeek = (e: number) => (_selectedTime.current = e);
const onProgress = (e: OnProgressProps) =>
(_selectedTime.current = e.playedSeconds);
const onSetStartTime = () => setStartTime(_selectedTime.current);
const onSetEndTime = () => setEndTime(_selectedTime.current);
const displayStartTime = `${moment.utc(startTime * 1000).format("HH:mm:ss")}`;
const displayEndTime = `${moment.utc(endTime * 1000).format("HH:mm:ss")}`;
const onClickAnalyseVoice = () => {
if (startTime !== endTime) {
setIsLoading(true);
CommonAPI.analyseInterview({
startTime,
endTime,
applicationId: application._id,
})
.then((res: any) => {
setAnalysis(res);
setIsLoading(false);
})
.catch((err) => {
setIsLoading(false);
});
}
};
const onClickAnalyseBlinks = () => {
if (startTime !== endTime) {
setIsLoading(true);
const url = `ws://127.0.0.1:8000/facial/ws/eye-blinks?application_id=${
application._id
}&start=${Math.round(startTime)}&end=${Math.round(endTime)}`;
const ws = new WebSocket(url);
ws.onmessage = (e) => {
const socketData = JSON.parse(e.data) as any;
if (socketData.end) {
const mins = (Math.round(endTime) - Math.round(startTime)) / 60;
const ratio = Math.ceil(socketData.count / mins);
if (analysis) {
setAnalysis({ ...analysis, eyeBlinks: ratio });
} else {
setAnalysis({ eyeBlinks: ratio });
}
setIsLoading(false);
ws.close();
}
};
return () => ws.close();
}
};
const onSelectVideo = (e: ChangeEvent<HTMLInputElement>) => {
if (e.target.files) {
setIsLoading(true);
const path = `/interviews/${application._id}.mp4`;
const uploadRef = ref(fileStorage, path);
const uploadTask = uploadBytesResumable(uploadRef, e.target.files[0]);
uploadTask.on(
"state_changed",
(snapshot) => {
const percent = Math.round(
(snapshot.bytesTransferred / snapshot.totalBytes) * 100
);
setProgress(percent);
},
(err) => {},
() => {
getDownloadURL(uploadTask.snapshot.ref).then((url) => {
dispatch(
updateApplication(
{
applicationId: application._id,
candidateId: application.candidate._id || "",
update: { interview: { videoRef: url } },
},
() => {
setProgress(0);
setIsLoading(false);
}
)
);
});
}
);
}
};
const onClickUpload = () => {
_hiddenFileInput?.current?.click();
};
const renderLoading = isLoading && (
<div className="spinner-border text-primary" role="status">
<span className="visually-hidden">Loading...</span>
</div>
);
if (!application.interview?.videoRef) {
return (
<>
<div className="upload-interview-video">
<button
type="button"
className="btn btn-secondary"
onClick={onClickUpload}
disabled={isLoading}
>
Upload video
</button>
<input
type="file"
accept="video/mp4"
ref={_hiddenFileInput}
onChange={onSelectVideo}
/>
</div>
<Progress progress={progress} />
</>
);
}
const renderVoiceVerification = () => {
if (application.interview?.voiceVerification) {
if (application.interview.voiceVerification === "Pending") {
return (
<strong className="text-primary">Voice verification: Pending</strong>
);
}
const verification = Number(application?.interview?.voiceVerification);
const score = verification * 100;
const percentage = `${score?.toFixed(2)}%`;
return (
<strong className={getVerificationColor(verification * 100)}>
Voice verification: {percentage}
</strong>
);
}
return (
<strong className="text-primary">Voice verification: Pending</strong>
);
};
return (
<div>
<ReactPlayer
ref={_video}
url={application.interview.videoRef}
controls
height="100%"
width="100%"
onSeek={onSeek}
onProgress={onProgress}
/>
{renderVoiceVerification()}
<div className="row" style={{ padding: "12px" }}>
<div className="col-sm-6">
<div className="row">
<button
type="button"
className="col-sm-4 btn btn-warning btn-sm"
onClick={onSetStartTime}
>
Start time
</button>
<div className="col-sm-8">
<input
type="text"
className="form-control"
name="startTime"
disabled
value={displayStartTime}
/>
</div>
</div>
</div>
<div className="col-sm-6">
<div className="row">
<button
type="button"
className="col-sm-4 btn btn-warning btn-sm"
onClick={onSetEndTime}
>
End time
</button>
<div className="col-sm-8">
<input
type="text"
className="form-control"
name="endTime"
disabled
value={displayEndTime}
/>
</div>
</div>
</div>
</div>
<div className="row mt-2">
<div className="mb-3 row">
<div className="col-sm-3">
<label className="col-form-label">
<strong>Vocal expression</strong>
</label>
<div className="row">
<div className="col-sm-8">
<button
type="button"
className="btn btn-primary w-100 btn-sm"
onClick={onClickAnalyseVoice}
disabled={isLoading || isFacialAnalyzing}
>
Analyse
</button>
</div>
{renderLoading}
</div>
</div>
<label className="col-sm-3 col-form-label">{analysis?.voice}</label>
<div className="col-sm-3 ">
<label className="col-form-label">
<strong>Eye blink ratio</strong>
</label>
<div className="row">
<div className="col-sm-8">
<button
type="button"
className="btn btn-primary w-100 btn-sm"
onClick={onClickAnalyseBlinks}
disabled={isLoading || isFacialAnalyzing}
>
Analyse
</button>
</div>
{renderLoading}
</div>
</div>
<label className="col-sm-3 col-form-label">
<strong>{analysis?.eyeBlinks} (blinks per min)</strong>
</label>
</div>
</div>
<label className="col-sm-2 col-form-label">
<strong>Facial expressions</strong>
</label>
{emotions.length === 0 ? (
<div className="upload-interview-video">
<button
type="button"
className="btn btn-secondary"
onClick={onPressStartFacialExpression}
disabled={isLoading}
>
Start analysis
</button>
</div>
) : (
<ResponsiveContainer
height={280}
width="100%"
key={`rc_${emotions.length}`}
>
<AreaChart data={emotions} key={`ac_${emotions.length}`}>
<XAxis dataKey="Name" padding={{ left: paddingLeft }} />
<YAxis />
<Tooltip isAnimationActive={false} />
<Area
type="monotone"
dataKey="angry"
stroke="red"
fillOpacity={0.2}
fill="red"
key={`angry_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="disgust"
stroke="blue"
fillOpacity={0.2}
fill="blue"
key={`disgust_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="fear"
stroke="black"
fillOpacity={0.2}
fill="black"
key={`fear_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="happy"
stroke="yellow"
fillOpacity={0.2}
fill="yellow"
key={`happy_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="sad"
stroke="gray"
fillOpacity={0.2}
fill="gray"
key={`gray_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="surprise"
stroke="purple"
fillOpacity={0.2}
fill="purple"
key={`purple_${emotions.length}`}
isAnimationActive={false}
/>
<Area
type="monotone"
dataKey="neutral"
stroke="brown"
fillOpacity={0.2}
fill="brown"
key={`neutral_${emotions.length}`}
isAnimationActive={false}
/>
</AreaChart>
</ResponsiveContainer>
)}
</div>
);
};
export default Analyse;
import React, { useState, useRef, ChangeEvent } from "react";
import { CreateCompletionResponseChoicesInner } from "openai";
import { ApplicationPayloadType } from "../../common/types";
import OpenAPI from "../../common/lib/openApi";

type OwnProps = {
  application: ApplicationPayloadType;
};

const Interview = ({ application }: OwnProps) => {
  const [isLoading, setIsLoading] = useState<boolean>(false);
  const [questions, setQuestions] = useState<CreateCompletionResponseChoicesInner[]>([]);
  const subject = useRef<string>("");
  const level = useRef<string>("easy");

  // Requests a fresh set of questions for the current subject and difficulty.
  const onPressGenerate = () => {
    if (subject.current.length > 0) {
      setQuestions([]);
      setIsLoading(true);
      OpenAPI.getInterViewQuestions(subject.current, level.current)
        .then((res) => {
          setQuestions(res.data.choices);
          setIsLoading(false);
        })
        // Reset the spinner if the request fails, mirroring the other handlers.
        .catch(() => setIsLoading(false));
    }
  };

  const onChange = (e: ChangeEvent<HTMLInputElement>) => {
    subject.current = e.target.value;
  };

  const onSelectLevel = (e: ChangeEvent<HTMLSelectElement>) => {
    level.current = e.target.value;
  };

  const renderQuestions = (question: CreateCompletionResponseChoicesInner, index: number) => {
    return (
      <p key={index} style={{ whiteSpace: "pre-line" }}>
        {question.text}
      </p>
    );
  };

  const btnText = isLoading ? "Generating.." : "Generate";

  return (
    <div>
      <h4>Questions Generator</h4>
      <div className="row g-3">
        <label className="col-3 col-form-label">Subject</label>
        <div className="col-6">
          <div className="row g-3">
            <div className="col-8">
              <input type="text" className="form-control" onChange={onChange} />
            </div>
            <div className="col-4">
              <select className="form-select" aria-label="Question difficulty" onChange={onSelectLevel}>
                <option value="easy">Easy</option>
                <option value="intermediate">Medium</option>
                <option value="hard">Hard</option>
              </select>
            </div>
          </div>
        </div>
        <div className="col-3">
          <button className="btn btn-warning mb-3" onClick={onPressGenerate} disabled={isLoading}>
            {btnText}
          </button>
        </div>
      </div>
      <div className="questions-container">{questions.map(renderQuestions)}</div>
    </div>
  );
};

export default Interview;
@@ -6,9 +6,22 @@ import TabNavBar from "../components/TabNavBar";
import { useDispatch } from "react-redux";
import { updateApplication } from "../common/actions/common";
import Applicant from "../components/Application/Applicant";
import Analyse from "../components/Application/Analyse";
import Interview from "../components/Application/Interview";

const tabs = ["Candidate", "Interview", "Analyse"];

const getStatusValues = (status?: string) => {
  if (status === "Pending") {
    return ["Schedule", "Reject"];
  } else if (status === "In progress") {
    return ["Accept", "Reject"];
  } else if (status === "Rejected") {
    return ["Pending", "Schedule"];
  } else {
    return [];
  }
};

const Application = () => {
  const dispatch = useDispatch();

@@ -21,17 +34,7 @@ const Application = () => {
  );
  const [status, setStatus] = useState(application?.status);

  const statusValues = getStatusValues(application?.status);

  useEffect(() => {
    if (application) {

@@ -80,10 +83,11 @@
    <div className="card p-4">
      <TabNavBar tabs={tabs} selected={selectedTab} onSelect={setSelectedTab} />
      {selectedTab === "Candidate" && <Applicant application={application} />}
      {selectedTab === "Interview" && <Interview application={application} />}
      {selectedTab === "Analyse" && <Analyse application={application} />}
      <hr />
      {statusValues.length > 0 && (
        <div className="row ">
          <label className="col-sm-2 col-form-label">Status</label>
          <div className="col-sm-6 ">

@@ -108,7 +112,7 @@
            </div>
          </div>
        </div>
      )}
    </div>
  );
};
{
  "compilerOptions": {
    "target": "es2017", // was "es5"
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
@@ -3239,6 +3239,13 @@ axe-core@^4.4.3:
  resolved "https://registry.yarnpkg.com/axe-core/-/axe-core-4.6.0.tgz#1d07514866fa51262734b3357932fcf86961383a"
  integrity sha512-L3ZNbXPTxMrl0+qTXAzn9FBRvk5XdO56K8CvcCKtlxv44Aw2w2NCclGuvCWxHPw1Riiq3ncP/sxFYj2nUqdoTw==

axios@^0.26.0:
  version "0.26.1"
  resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
  integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
  dependencies:
    follow-redirects "^1.14.8"

axios@^1.3.4:
  version "1.3.4"
  resolved "https://registry.yarnpkg.com/axios/-/axios-1.3.4.tgz#f5760cefd9cfb51fd2481acf88c05f67c4523024"

@@ -5164,7 +5171,7 @@ flatted@^3.1.0:
  resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
  integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==

follow-redirects@^1.0.0, follow-redirects@^1.14.8, follow-redirects@^1.15.0:
  version "1.15.2"
  resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
  integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==

@@ -7261,6 +7268,14 @@ open@^8.0.9, open@^8.4.0:
    is-docker "^2.1.1"
    is-wsl "^2.2.0"

openai@^3.2.1:
  version "3.2.1"
  resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866"
  integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A==
  dependencies:
    axios "^0.26.0"
    form-data "^4.0.0"

optionator@^0.8.1:
  version "0.8.3"
  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495"