Commit 5422bd2a authored by Adithya Kahawanugoda's avatar Adithya Kahawanugoda

Merge branch 'reasoning-eval-backend' into 'master'

picture concept configured

See merge request !20
parents 67221e7b d43c22a6
import os
import time

import azure.cognitiveservices.speech as speechsdk

speechKey, region = "a61a66a0564f4830896d212b125f72df", "southeastasia"

def transform_audio(fileName):
    # Transcribe a Sinhala (si-LK) audio file using Azure continuous recognition
    speech_config = speechsdk.SpeechConfig(subscription=speechKey, region=region)
    speech_config.speech_recognition_language = "si-LK"
    audio_input = speechsdk.AudioConfig(filename=fileName)
    speech_recognizer = speechsdk.SpeechRecognizer(speech_config=speech_config, audio_config=audio_input)

    done = False

    def stop_cb(evt):
        # Stop continuous recognition once the session ends or is canceled
        print('CLOSING on {}'.format(evt))
        speech_recognizer.stop_continuous_recognition()
        nonlocal done
        done = True

    all_results = []

    def handle_final_result(evt):
        all_results.append(evt.result.text)

    speech_recognizer.recognized.connect(handle_final_result)
    speech_recognizer.recognizing.connect(lambda evt: print('RECOGNIZING: {}'.format(evt)))
    speech_recognizer.recognized.connect(lambda evt: print('RECOGNIZED: {}'.format(evt)))
    speech_recognizer.session_started.connect(lambda evt: print('SESSION STARTED: {}'.format(evt)))
    speech_recognizer.session_stopped.connect(lambda evt: print('SESSION STOPPED {}'.format(evt)))
    speech_recognizer.canceled.connect(lambda evt: print('CANCELED {}'.format(evt)))
    speech_recognizer.session_stopped.connect(stop_cb)
    speech_recognizer.canceled.connect(stop_cb)

    speech_recognizer.start_continuous_recognition()
    while not done:
        time.sleep(.5)

    print("Printing all results:")
    print(all_results)
    results = all_results
    speech_recognizer.stop_continuous_recognition()
    return results
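A minimal, hypothetical smoke test for the helper (not part of the commit; "sample_si.wav" is a placeholder for any recorded Sinhala clip):

# Hypothetical local check of preProcessor.transform_audio.
# transform_audio returns the list of recognized text segments for the clip.
from preProcessor import transform_audio

segments = transform_audio("sample_si.wav")  # placeholder .wav path
print(" ".join(segments))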
@@ -10,7 +10,7 @@ from PIL import Image
import datetime
import base64
import io
+from preProcessor import transform_audio
# Libraries required for model utilization
import cv2 as cv
import numpy as np
@@ -331,6 +331,32 @@ def predictShapePattern():
    return jsonify(response)
+# Reasoning IQ evaluation
+@app.route('/predictReasoning', methods=['POST', 'GET'])
+@cross_origin()
+def predictReasoning():
+    if request.method == 'POST':
+        file = request.files.get('file')
+        questionIndex = request.form.get('questionIndex')
+        print(questionIndex)
+        if file is None or file.filename == "":
+            return jsonify({'error': 'no file'})
+        try:
+            file.save("./" + file.filename)
+            prediction = transform_audio(file.filename)
+            data = {'prediction': prediction}
+            db.db['reasoningIQScore'].insert_one({
+                "activityName": "Picture Concept",
+                "questionIndex": questionIndex,
+                "transcription": prediction,
+            })
+            return jsonify(data)
+        except Exception:
+            return jsonify({'error': 'Error during pipeline execution'})
+    return jsonify({'result': 'test'})
@app.route('/mentalChromScores', methods=['POST'])
@cross_origin()
...
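The new /predictReasoning route takes a multipart form with a file part and a questionIndex field and responds with the transcription. A hedged manual test in Python (assumes the Flask app is running on the baseURL from config/api, and PC.wav is a locally recorded clip):

# Hypothetical client-side check of the /predictReasoning endpoint.
import requests

with open("PC.wav", "rb") as audio:  # PC.wav mirrors the file name used by the frontend recorder
    resp = requests.post(
        "http://127.0.0.1:5000/predictReasoning",
        files={"file": ("PC.wav", audio, "audio/wav")},
        data={"questionIndex": "0"},  # example index
    )
print(resp.json())  # {'prediction': [...]} on success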
@@ -82,32 +82,6 @@ const PictureConcept = ({ nextActivity }) => {
    "https://i.ibb.co/94Mmmjy/Vector-Ant-Transparent-PNG.png",
  ];
-  const [time, setTime] = useState({
-    seconds: 0,
-  });
-  const [isCancelled] = useState(false);
-  useEffect(() => {
-    const calculateTime = () => {
-      setTimeout(() => {
-        let nSeconds = time.seconds;
-        let nMinutes = time.minutes;
-        let nHours = time.hours;
-        nSeconds++;
-        !isCancelled &&
-          setTime({ seconds: nSeconds, minutes: nMinutes, hours: nHours });
-      }, 1000);
-    };
-    calculateTime();
-    if (time.seconds === 30) {
-      setImg(img + 4);
-      imageSwitcher();
-      time.seconds = 0;
-    }
-  }, []);
  const imageSwitcher = () => {
    var urlCount = 2;
@@ -140,6 +114,18 @@ const PictureConcept = ({ nextActivity }) => {
    }
  };
+  const switchActivityHandler = () => {
+    let activityNo = activityIndex + 1;
+    setActivityIndex(activityNo);
+    setImg(img + 4);
+    imageSwitcher();
+  };
+  useEffect(() => {
+    // console.log(activityIndex);
+    RecordingHandler(`PC.wav`, activityIndex);
+  }, [activityIndex]);
  return (
    <div className="mt-11">
      {!allCompleted && imgSrc1 && <ImageRow tempImgSrc={imgSrc1} />}
@@ -155,7 +141,7 @@ const PictureConcept = ({ nextActivity }) => {
        </div>
      )}
-      {allCompleted && (
+      {/* {allCompleted && (
        <button
          className="float-right bg-transparent hover:bg-red-500 text-red-700 font-semibold hover:text-white py-2 px-4 border border-red-500 hover:border-transparent rounded"
          onClick={() => {
@@ -164,29 +150,9 @@ const PictureConcept = ({ nextActivity }) => {
        >
          Next Activity
        </button>
-      )}
+      )} */}
-      {!allCompleted && <Timer />}
+      {!allCompleted && <Timer switchActivity={switchActivityHandler} />}
-      {!allCompleted && (
+      {!allCompleted && <div></div>}
-        <div>
-          {/* <button
-            className="float-right bg-transparent hover:bg-blue-500 text-blue-700 font-semibold hover:text-white py-2 px-4 border border-blue-500 hover:border-transparent rounded"
-            onClick={() => {
-              setImg(img + 4);
-              imageSwitcher();
-            }}
-          >
-            Next
-          </button> */}
-          <button
-            className="float-right bg-transparent hover:bg-blue-500 text-blue-700 font-semibold hover:text-white py-2 px-4 border border-blue-500 hover:border-transparent rounded"
-            onClick={() => {
-              RecordingHandler("PC4.wav"); //pass activity(PC), question(1) names with wav extension
-            }}
-          >
-            Start Rec
-          </button>
-        </div>
-      )}
    </div>
  );
};
...
import * as RecordRTC from "recordrtc";
import axios from "axios";
import baseURL from "../../../../config/api";
+import API from "../../../../config/api";
-export const RecordingHandler = async (fileName) => {
+export const RecordingHandler = async (fileName, questionIndex) => {
  let stream = await navigator.mediaDevices.getUserMedia({
    video: false,
    audio: true,
@@ -16,40 +17,40 @@ export const RecordingHandler = async (fileName) => {
  recorder.record();
  const sleep = (m) => new Promise((r) => setTimeout(r, m));
-  await sleep(30000);
+  await sleep(15000);
  await recorder.stop(function () {
    let blob = recorder.blob;
-    processRecording(blob, fileName);
+    processRecording(blob, fileName, questionIndex);
  });
  stream.getTracks().forEach(function (track) {
    track.stop();
  });
};
-const processRecording = (blob, fileName) => {
+const processRecording = (blob, fileName, questionIndex) => {
  let recordedFile = new File([blob], fileName);
-  uploadRecording(recordedFile, fileName);
+  uploadRecording(recordedFile, fileName, questionIndex);
};
-const uploadRecording = (file, fileName) => {
+const uploadRecording = async (file, fileName, questionIndex) => {
  let data = new FormData();
+  console.log(questionIndex);
  data.append("file", file, fileName);
+  data.append("questionIndex", questionIndex);
  const config = {
    headers: {
      "content-type": "multipart/form-data",
-      Accept: "*/*",
+      "Access-Control-Allow-Origin": "*",
    },
  };
-  axios
-    .post(`${baseURL}predictReasoning`, data, config)
+  await API.post(`predictReasoning`, data, config)
    .then((res) => {
-      console.log(res, "DONE");
+      console.log(res, "DONE" + new Date().toISOString());
    })
    .catch((err) => {
-      console.log(err, "ERROR");
+      console.log(err, "ERROR" + new Date().toISOString());
    });
};
-import React, { useState } from "react";
+import React from "react";
import { CountdownCircleTimer } from "react-countdown-circle-timer";
import "./Timer.css";
-const Timer = () => {
+const Timer = ({ switchActivity }) => {
  const renderTime = ({ remainingTime }) => {
    if (remainingTime === 0) {
      return <div className="timer text-black">--</div>;
@@ -16,15 +16,16 @@ const Timer = () => {
    <div className="timer-wrapper">
      <CountdownCircleTimer
        isPlaying
-        duration={100}
+        duration={15}
        colors={["#004777", "#F7B801", "#A30000", "#A30000"]}
-        colorsTime={[100, 50, 25, 10]}
+        colorsTime={[15, 10, 7, 5]}
        rotation={"counterclockwise"}
        size={100}
        strokeWidth={8}
        onComplete={() => {
          // change activity
-          return { shouldRepeat: true, delay: 1.5 }; // repeat animation in 1.5 seconds
+          switchActivity();
+          return { shouldRepeat: true, delay: 0 }; // repeat the animation immediately
        }}
      >
        {renderTime}
...
-import axios from 'axios';
+import axios from "axios";
export default axios.create({
-  baseURL: `http://127.0.0.1:5000/`
+  baseURL: `http://127.0.0.1:5000/`,
});
\ No newline at end of file