Commit 257d1d7f authored by kulvinu's avatar kulvinu

KnowledgeIQ Implementation

parent 897d94ae
......@@ -14,6 +14,7 @@ import numpy as np
from trainAnn import headPose, headDistence, facialExperssions, blinkCount, detector
from keras.models import load_model
from sklearn.preprocessing import OneHotEncoder
from mlengine import transform_audio
# Attention classes
# Maps the ANN's integer class output (0-2) to a human-readable attention label.
attClass = {0: 'Low Attention', 1: 'Mid Attention', 2: 'High Attention'}
......@@ -323,6 +324,32 @@ def predictShapePattern():
# print(prediction2)
return jsonify(response)
# Knowledge IQ evaluation
@app.route('/predictKnowledgeIq', methods = ['POST','GET'])
@cross_origin()
def predictKnowledgeIq():
    """Knowledge IQ evaluation endpoint.

    POST: expects a multipart form with an audio 'file' and a 'questionIndex'
    field. Saves the upload, transcribes it with transform_audio(), records the
    transcription in the knowledgeIQScore collection, and returns
    {'prediction': <transcription>} as JSON.
    GET: returns a simple test response.
    """
    import os  # local import: used only to sanitise the upload filename

    if request.method == 'POST':
        file = request.files.get('file')
        questionIndex = request.form.get('questionIndex')
        print(questionIndex)
        if file is None or file.filename == "":
            # BUG FIX: the original passed a *set* literal {'error: no file'}
            # to jsonify, which is not JSON-serializable; use a real dict.
            return jsonify({'error': 'no file'})
        try:
            # Strip any directory components from the client-supplied filename
            # to prevent path traversal when saving next to the app.
            filename = os.path.basename(file.filename)
            file.save("./" + filename)
            prediction = transform_audio(filename)
            data = {'prediction': prediction}
            db.db['knowledgeIQScore'].insert_one({
                "activityName": "Colour Numbers",
                "questionIndex": questionIndex,
                "transcription": prediction,
            })
            return jsonify(data)
        except Exception:
            # Narrowed from a bare except; keep the original best-effort
            # behaviour but return a well-formed JSON error (the original
            # returned an unserializable set literal here too).
            return jsonify({'error': 'Error during pipeline execution'})
    return jsonify({'result': 'test'})
# Running app
if __name__ == "__main__":
......
import numpy as np
import librosa
import librosa.display
import IPython.display as ipd
import matplotlib.pyplot as plt
def read_file(file_name, sample_rate):
    """Read a PCM .wav file and return its samples as a tuple of ints.

    Parameters
    ----------
    file_name : str
        Path to the .wav file.
    sample_rate : int
        Required sample rate; any other rate raises ValueError.

    Returns
    -------
    tuple of int
        Samples of the first (left) channel only.

    Raises
    ------
    ValueError
        If the file's sample rate differs from `sample_rate`.
    """
    # "with" guarantees the handle is closed even when the sample-rate check
    # raises (the original leaked the open file on that error path).
    with wave.open(file_name, mode="rb") as wav_file:
        channels = wav_file.getnchannels()
        num_frames = wav_file.getnframes()
        if wav_file.getframerate() != sample_rate:
            raise ValueError("Audio file should have a sample rate of %d. got %d" % (sample_rate, wav_file.getframerate()))
        samples = wav_file.readframes(num_frames)
    # 'h' assumes 16-bit signed samples — matches the recordings this
    # pipeline produces.
    frames = struct.unpack('h' * num_frames * channels, samples)
    if channels == 2:
        print("Picovoice processes single-channel audio but stereo file is provided. Processing left channel only.")
    # Stride by channel count: for stereo this keeps only the left channel.
    return frames[::channels]
## Loading audio
# Exploratory script: load one recording, trim leading/trailing silence,
# and plot the original vs. trimmed waveforms side by side.
dataset_dir = '/datasets/live_recordings/'
audio_name = 'one.wav'
# kaiser_fast trades a little resampling quality for speed while decoding.
y, sample_rate = librosa.load(dataset_dir + audio_name, res_type='kaiser_fast')
# Play the original audio
# NOTE(review): ipd.Audio only renders a player inside a Jupyter notebook;
# as a plain script this return value is discarded.
print("Original audio - downsampled by librosa")
ipd.Audio(y, rate=sample_rate)
#------------------------------------------------------------------------------------
## Trim the beginning and ending silence
y_trimmed, _ = librosa.effects.trim(y)
print("Original duration: ", librosa.get_duration(y))
print("Trimmed duration: ", librosa.get_duration(y_trimmed))
figure = plt.figure()
# Trimmed audio - without silence
trimmed = figure.add_subplot(2, 1, 2)
librosa.display.waveplot(y_trimmed, sr=sample_rate, color='r')
plt.title('Trimmed')
# Original audio - with silence at the end
# sharex keeps both subplots on the same time axis for easy comparison.
original = figure.add_subplot(2, 1, 1,sharex=trimmed)
librosa.display.waveplot(y, sr=sample_rate)
plt.title('Original')
plt.tight_layout()
plt.show()
# Play the original audio
print("Trimmed audio")
ipd.Audio(y_trimmed, rate=sample_rate)
### Audio segmentation into windows
# Split the recording into chunks on silence so each spoken digit can be
# transcribed independently.
from pydub import AudioSegment
from pydub.silence import split_on_silence
sound_file = AudioSegment.from_wav("one.wav")
# A gap counts as silence when it lasts >= 500 ms and stays below -40 dBFS.
audio_chunks = split_on_silence(sound_file, min_silence_len=500, silence_thresh=-40)
print ("AudioChunks", audio_chunks)
for i, chunk in enumerate(audio_chunks):
    # BUG FIX: the original template "./a//.wav" had no {} placeholder, so
    # every chunk exported to the same path and overwrote the previous one.
    out_file = "./a/chunk{0}.wav".format(i)
    print ("exporting", out_file)
    chunk.export(out_file, format="wav")
<ш4BFO S"t7^OBpВ[cd\"+Gz )?d,fɵ.gu/Ug_S ! G,(+xP.+s >}N%"[:ax(|[)bVX!Hivɟvzs9J.eà~'}ۓ~QVP @5 :Hf LVPRTd)RbaVen.u
\ No newline at end of file
import difflib
# Expected spoken answers per question index for each activity.
colourNumbers = [ "5 4","5 6 7","5 6 7 8"];
digitSpan = [ "5 4","5 6 7","5 6 7 8"];

def _similarity_score(expected, prediction):
    """Return [exact-match score (100 or 0), difflib similarity ratio * 100]."""
    score = 100 if expected == prediction else 0
    temp = difflib.SequenceMatcher(None, expected, prediction)
    print(temp.get_matching_blocks())
    print('Similarity Score: ',temp.ratio())
    return [score, temp.ratio()*100]

def calKnowledgeIqActivityScore(activity, questionIndex, prediction):
    """Score a transcribed answer for a Knowledge IQ activity.

    Parameters
    ----------
    activity : str
        Activity name: "Colour Numbers" or "Digit Span".
    questionIndex : int
        Index into the activity's expected-answer list.
    prediction : str
        Transcribed answer; '.' characters are stripped before comparison.

    Returns
    -------
    list
        [exact score (0/100), similarity percentage]; [0, 0] for an
        unknown activity.
    """
    prediction = prediction.replace(".","")
    if activity == "Colour Numbers":
        return _similarity_score(colourNumbers[questionIndex], prediction)
    if activity == "Digit Span":
        # BUG FIX: the original compared against colourNumbers in this branch;
        # the Digit Span activity must score against its own answer key.
        return _similarity_score(digitSpan[questionIndex], prediction)
    print('Score can be specified to the activity specified in the system!')
    return [0, 0]
\ No newline at end of file
import io
import tensorflow as tf
from tensorflow import keras
import os
import pandas as pd
from sklearn.model_selection import train_test_split
import pickle
import numpy as np
from scipy.io import wavfile as wav
import scipy
import scipy.signal as sps
from python_speech_features import mfcc
from python_speech_features import logfbank
from tensorflow.keras.models import Sequential, save_model, load_model
from audioPreprocessing import readFile
# Path to the trained digit-recognition model (TensorFlow SavedModel dir).
modelfilepath = './saved_model'
# Root directory for data files.
datafilepath = './data'
# Number of MFCC frames per sample; the network input is (size, 13, 1).
size = 48
# Output labels ordered to match the model's 10 softmax outputs.
DIGITS = ["0", "1", "2", "3","4","5","6","7","8","9"]
# Loaded once at import time so every request reuses the same model instance.
model = load_model(modelfilepath, compile=True)
def transform_audio(file):
    """Transcribe a single spoken digit from a .wav file.

    Reads the file, resamples it to 16 kHz, extracts a fixed-size MFCC
    feature matrix, and runs the CNN classifier.

    Parameters
    ----------
    file : str
        Path to the .wav recording.

    Returns
    -------
    str
        The predicted digit label ("0".."9").
    """
    # Read raw samples and their native sample rate.
    (rate, sig) = readFile(file)
    # Resample to the 16 kHz rate the model was trained on.
    target_rate = 16000
    number_of_samples = round(len(sig) * float(target_rate) / rate)
    sig = sps.resample(sig, number_of_samples)
    # BUG FIX: MFCCs must be computed at the *resampled* rate; the original
    # passed the pre-resampling rate, producing mis-scaled features.
    mfcc_feat = mfcc(sig, target_rate, nfft=2048)
    # Pad/crop to the fixed (size, 13) input shape the network expects.
    mfcc_feat = np.resize(mfcc_feat, (size, 13))
    return get_prediction(mfcc_feat)
def get_prediction(X):
    """Classify one (size, 13) MFCC matrix and return its digit label."""
    # The model expects a batch of (size, 13, 1) "images": add the batch
    # dimension and the trailing channel axis before predicting.
    probabilities = model.predict(X.reshape(-1, size, 13, 1))
    label = DIGITS[int(np.argmax(probabilities))]
    print("\n\033[1mPredicted digit sound: %.0f"%probabilities.argmax(),"\033[0m \n ")
    print("Predicted probability array:")
    print(probabilities)
    return label
{
"requires": true,
"lockfileVersion": 1,
"dependencies": {
"-": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/-/-/--0.0.1.tgz",
"integrity": "sha512-3HfneK3DGAm05fpyj20sT3apkNcvPpCuccOThOPdzz8sY7GgQGe0l93XH9bt+YzibcTIgUAIMoyVJI740RtgyQ=="
},
"ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"requires": {
"fast-deep-equal": "^3.1.1",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.4.1",
"uri-js": "^4.2.2"
}
},
"asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
"integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
"requires": {
"safer-buffer": "~2.1.0"
}
},
"assert-plus": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz",
"integrity": "sha512-NfJ4UzBCcQGLDlQq7nHxH+tv3kyZ0hHQqF5BO6J7tNJeP5do1llPr8dZ8zHonfhAu0PHAdMkSo+8o0wxg9lZWw=="
},
"asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
},
"aws-sign2": {
"version": "0.7.0",
"resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz",
"integrity": "sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA=="
},
"aws4": {
"version": "1.11.0",
"resolved": "https://registry.npmjs.org/aws4/-/aws4-1.11.0.tgz",
"integrity": "sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA=="
},
"bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
"requires": {
"tweetnacl": "^0.14.3"
}
},
"caseless": {
"version": "0.12.0",
"resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz",
"integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw=="
},
"combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"requires": {
"delayed-stream": "~1.0.0"
}
},
"cookiejar": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz",
"integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ=="
},
"core-util-is": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha512-3lqz5YjWTYnW6dlDa5TLaTCcShfar1e40rmcJVwCBJC6mWlFuj0eCHIElmG1g5kyuJ/GD+8Wn4FFCcz4gJPfaQ=="
},
"dashdash": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha512-jRFi8UDGo6j+odZiEpjazZaWqEal3w/basFjQHQEwVtZJGDpxbH1MeYluwCS8Xq5wmLJooDlMgvVarmWfGM44g==",
"requires": {
"assert-plus": "^1.0.0"
}
},
"delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="
},
"ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
"integrity": "sha512-eh9O+hwRHNbG4BLTjEl3nw044CkGm5X6LoaCf7LPp7UU8Qrt47JYNi6nPX8xjW97TKGKm1ouctg0QSpZe9qrnw==",
"requires": {
"jsbn": "~0.1.0",
"safer-buffer": "^2.1.0"
}
},
"extend": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
"extsprintf": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz",
"integrity": "sha512-11Ndz7Nv+mvAC1j0ktTa7fAb0vLyGGX+rMHNBYQviQDGU0Hw7lhctJANqbPhu9nV9/izT/IntTgZ7Im/9LJs9g=="
},
"fast-deep-equal": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="
},
"fast-json-stable-stringify": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
"integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="
},
"faye": {
"version": "0.8.11",
"resolved": "https://registry.npmjs.org/faye/-/faye-0.8.11.tgz",
"integrity": "sha512-d2SXlWy+wR8D2AgYjCnJrA8v4RvwKeRQeTB2aLUetyhrNKTU28mAvSMezSZDNyOONVrsF0IY1s4625QgggM2XA==",
"requires": {
"cookiejar": "^2.1.3",
"faye-websocket": ">=0.4.0"
}
},
"faye-websocket": {
"version": "0.11.4",
"resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz",
"integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==",
"requires": {
"websocket-driver": ">=0.5.1"
}
},
"force": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/force/-/force-0.0.3.tgz",
"integrity": "sha512-B/4gl3/7o8Q4jYfXNKSvTHlAPxB1ruYCkxVkiVUUuHziYbDa2NsURljSgpm+Q+d4cGmN1EaAD5QXhLodGN44zA==",
"requires": {
"faye": "~0.8.3",
"mime": "~1.2.9",
"request": "*"
}
},
"forever-agent": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz",
"integrity": "sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw=="
},
"form-data": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz",
"integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==",
"requires": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.6",
"mime-types": "^2.1.12"
}
},
"getpass": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha512-0fzj9JxOLfJ+XGLhR8ze3unN0KZCgZwiSSDz168VERjK8Wl8kVSdcu2kspd4s4wtAa1y/qrVRiAA0WclVsu0ng==",
"requires": {
"assert-plus": "^1.0.0"
}
},
"har-schema": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz",
"integrity": "sha512-Oqluz6zhGX8cyRaTQlFMPw80bSJVG2x/cFb8ZPhUILGgHka9SsokCCOQgpveePerqidZOrT14ipqfJb7ILcW5Q=="
},
"har-validator": {
"version": "5.1.5",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.5.tgz",
"integrity": "sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w==",
"requires": {
"ajv": "^6.12.3",
"har-schema": "^2.0.0"
}
},
"http-parser-js": {
"version": "0.5.8",
"resolved": "https://registry.npmjs.org/http-parser-js/-/http-parser-js-0.5.8.tgz",
"integrity": "sha512-SGeBX54F94Wgu5RH3X5jsDtf4eHyRogWX1XGT3b4HuW3tQPM4AaBzoUji/4AAJNXCEOWZ5O0DgZmJw1947gD5Q=="
},
"http-signature": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha512-CAbnr6Rz4CYQkLYUtSNXxQPUH2gK8f3iWexVlsnMeD+GjlsQ0Xsy1cOX+mN3dtxYomRy21CiOzU8Uhw6OwncEQ==",
"requires": {
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
"sshpk": "^1.7.0"
}
},
"is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
"integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA=="
},
"isstream": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz",
"integrity": "sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g=="
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg=="
},
"json-schema": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
"integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA=="
},
"json-schema-traverse": {
"version": "0.4.1",
"resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
"integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="
},
"json-stringify-safe": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
"integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA=="
},
"jsprim": {
"version": "1.4.2",
"resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz",
"integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==",
"requires": {
"assert-plus": "1.0.0",
"extsprintf": "1.3.0",
"json-schema": "0.4.0",
"verror": "1.10.0"
}
},
"mime": {
"version": "1.2.11",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.2.11.tgz",
"integrity": "sha512-Ysa2F/nqTNGHhhm9MV8ure4+Hc+Y8AWiqUdHxsO7xu8zc92ND9f3kpALHjaP026Ft17UfxrMt95c50PLUeynBw=="
},
"mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="
},
"mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"requires": {
"mime-db": "1.52.0"
}
},
"oauth-sign": {
"version": "0.9.0",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
"integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ=="
},
"performance-now": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
"integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow=="
},
"psl": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz",
"integrity": "sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag=="
},
"punycode": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
"integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A=="
},
"qs": {
"version": "6.5.3",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz",
"integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA=="
},
"request": {
"version": "2.88.2",
"resolved": "https://registry.npmjs.org/request/-/request-2.88.2.tgz",
"integrity": "sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw==",
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.8.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.6",
"extend": "~3.0.2",
"forever-agent": "~0.6.1",
"form-data": "~2.3.2",
"har-validator": "~5.1.3",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.19",
"oauth-sign": "~0.9.0",
"performance-now": "^2.1.0",
"qs": "~6.5.2",
"safe-buffer": "^5.1.2",
"tough-cookie": "~2.5.0",
"tunnel-agent": "^0.6.0",
"uuid": "^3.3.2"
}
},
"safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sshpk": {
"version": "1.17.0",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz",
"integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==",
"requires": {
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
"bcrypt-pbkdf": "^1.0.0",
"dashdash": "^1.12.0",
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
}
},
"tough-cookie": {
"version": "2.5.0",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.5.0.tgz",
"integrity": "sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==",
"requires": {
"psl": "^1.1.28",
"punycode": "^2.1.1"
}
},
"tunnel-agent": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha512-McnNiV1l8RYeY8tBgEpuodCC1mLUdbSN+CYBL7kJsJNInOP8UjDDEwdk6Mw60vdLLrr5NHKZhMAOSrR2NZuQ+w==",
"requires": {
"safe-buffer": "^5.0.1"
}
},
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
"uri-js": {
"version": "4.4.1",
"resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
"integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
"requires": {
"punycode": "^2.1.0"
}
},
"uuid": {
"version": "3.4.0",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz",
"integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A=="
},
"verror": {
"version": "1.10.0",
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha512-ZZKSmDAEFOijERBLkmYfJ+vmk3w+7hOLYDNkRCuRuMJGEmqYNCNLyBBFwWKVMhfwaEF3WOd0Zlw86U/WC/+nYw==",
"requires": {
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
"extsprintf": "^1.2.0"
}
},
"websocket-driver": {
"version": "0.7.4",
"resolved": "https://registry.npmjs.org/websocket-driver/-/websocket-driver-0.7.4.tgz",
"integrity": "sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==",
"requires": {
"http-parser-js": ">=0.5.1",
"safe-buffer": ">=5.1.0",
"websocket-extensions": ">=0.1.1"
}
},
"websocket-extensions": {
"version": "0.1.4",
"resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz",
"integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg=="
}
}
}
Rroot"_tf_keras_sequential*R{"name": "sequential", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "must_restore_from_config": false, "class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling2D", "config": {"name": "max_pooling2d", "trainable": true, "dtype": "float32", "pool_size": 
{"class_name": "__tuple__", "items": [2, 2]}, "padding": "valid", "strides": {"class_name": "__tuple__", "items": [2, 2]}, "data_format": "channels_last"}}, {"class_name": "Dropout", "config": {"name": "dropout", "trainable": true, "dtype": "float32", "rate": 0.1, "noise_shape": null, "seed": null}}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "dtype": "float32", "rate": 0.16, "noise_shape": null, "seed": null}}, {"class_name": "Activation", "config": {"name": "activation", "trainable": true, "dtype": "float32", "activation": "relu"}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "dtype": "float32", "rate": 0.12, "noise_shape": null, "seed": null}}, {"class_name": "Activation", "config": {"name": "activation_1", "trainable": true, "dtype": "float32", "activation": "relu"}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "dtype": "float32", 
"units": 10, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Activation", "config": {"name": "activation_2", "trainable": true, "dtype": "float32", "activation": "softmax"}}]}, "shared_object_id": 24, "input_spec": [{"class_name": "InputSpec", "config": {"dtype": null, "shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "ndim": 4, "max_ndim": null, "min_ndim": null, "axes": {}}}], "build_input_shape": {"class_name": "TensorShape", "items": [null, 48, 13, 1]}, "is_graph_network": true, "full_save_spec": {"class_name": "__tuple__", "items": [[{"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 48, 13, 1]}, "float32", "conv2d_input"]}], {}]}, "save_spec": {"class_name": "TypeSpec", "type_spec": "tf.TensorSpec", "serialized": [{"class_name": "TensorShape", "items": [null, 48, 13, 1]}, "float32", "conv2d_input"]}, "keras_version": "2.8.0", "backend": "tensorflow", "model_config": {"class_name": "Sequential", "config": {"name": "sequential", "layers": [{"class_name": "InputLayer", "config": {"batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "sparse": false, "ragged": false, "name": "conv2d_input"}, "shared_object_id": 0}, {"class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": 
"relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3}, {"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6}, {"class_name": "MaxPooling2D", "config": {"name": "max_pooling2d", "trainable": true, "dtype": "float32", "pool_size": {"class_name": "__tuple__", "items": [2, 2]}, "padding": "valid", "strides": {"class_name": "__tuple__", "items": [2, 2]}, "data_format": "channels_last"}, "shared_object_id": 7}, {"class_name": "Dropout", "config": {"name": "dropout", "trainable": true, "dtype": "float32", "rate": 0.1, "noise_shape": null, "seed": null}, "shared_object_id": 8}, {"class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 9}, {"class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": 
"GlorotUniform", "config": {"seed": null}, "shared_object_id": 10}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 11}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 12}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "dtype": "float32", "rate": 0.16, "noise_shape": null, "seed": null}, "shared_object_id": 13}, {"class_name": "Activation", "config": {"name": "activation", "trainable": true, "dtype": "float32", "activation": "relu"}, "shared_object_id": 14}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 15}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 16}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 17}, {"class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "dtype": "float32", "rate": 0.12, "noise_shape": null, "seed": null}, "shared_object_id": 18}, {"class_name": "Activation", "config": {"name": "activation_1", "trainable": true, "dtype": "float32", "activation": "relu"}, "shared_object_id": 19}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "dtype": "float32", "units": 10, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 20}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 21}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 22}, {"class_name": "Activation", "config": 
{"name": "activation_2", "trainable": true, "dtype": "float32", "activation": "softmax"}, "shared_object_id": 23}]}}, "training_config": {"loss": "categorical_crossentropy", "metrics": [[{"class_name": "MeanMetricWrapper", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 26}]], "weighted_metrics": null, "loss_weights": null, "optimizer_config": {"class_name": "Adam", "config": {"name": "Adam", "learning_rate": 0.0010000000474974513, "decay": 0.0, "beta_1": 0.8999999761581421, "beta_2": 0.9990000128746033, "epsilon": 1e-07, "amsgrad": false}}}}2
root.layer_with_weights-0"_tf_keras_layer*
{"name": "conv2d", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 1}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 2}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 3, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-1": 1}}, "shared_object_id": 27}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 48, 13, 1]}}2
root.layer_with_weights-1"_tf_keras_layer* {"name": "conv2d_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "dtype": "float32", "filters": 8, "kernel_size": {"class_name": "__tuple__", "items": [3, 3]}, "strides": {"class_name": "__tuple__", "items": [1, 1]}, "padding": "valid", "data_format": "channels_last", "dilation_rate": {"class_name": "__tuple__", "items": [1, 1]}, "groups": 1, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 4}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 5}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 6, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 4, "axes": {"-1": 8}}, "shared_object_id": 28}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 46, 11, 8]}}2
 root.layer-2"_tf_keras_layer*{"name": "max_pooling2d", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "MaxPooling2D", "config": {"name": "max_pooling2d", "trainable": true, "dtype": "float32", "pool_size": {"class_name": "__tuple__", "items": [2, 2]}, "padding": "valid", "strides": {"class_name": "__tuple__", "items": [2, 2]}, "data_format": "channels_last"}, "shared_object_id": 7, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": 4, "max_ndim": null, "min_ndim": null, "axes": {}}, "shared_object_id": 29}}2
 root.layer-3"_tf_keras_layer*{"name": "dropout", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dropout", "config": {"name": "dropout", "trainable": true, "dtype": "float32", "rate": 0.1, "noise_shape": null, "seed": null}, "shared_object_id": 8, "build_input_shape": {"class_name": "TensorShape", "items": [null, 22, 4, 8]}}2
 root.layer-4"_tf_keras_layer*{"name": "flatten", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "stateful": false, "must_restore_from_config": false, "class_name": "Flatten", "config": {"name": "flatten", "trainable": true, "batch_input_shape": {"class_name": "__tuple__", "items": [null, 48, 13, 1]}, "dtype": "float32", "data_format": "channels_last"}, "shared_object_id": 9, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 1, "axes": {}}, "shared_object_id": 30}}2
root.layer_with_weights-2"_tf_keras_layer*{"name": "dense", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 10}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 11}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 12, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 704}}, "shared_object_id": 31}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 704]}}2
 root.layer-6"_tf_keras_layer*{"name": "dropout_1", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "dtype": "float32", "rate": 0.16, "noise_shape": null, "seed": null}, "shared_object_id": 13, "build_input_shape": {"class_name": "TensorShape", "items": [null, 64]}}2
 root.layer-7"_tf_keras_layer*{"name": "activation", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Activation", "config": {"name": "activation", "trainable": true, "dtype": "float32", "activation": "relu"}, "shared_object_id": 14}2
 root.layer_with_weights-3"_tf_keras_layer*{"name": "dense_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "dtype": "float32", "units": 64, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 15}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 16}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 17, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 64}}, "shared_object_id": 32}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 64]}}2

 root.layer-9"_tf_keras_layer*{"name": "dropout_2", "trainable": true, "expects_training_arg": true, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "dtype": "float32", "rate": 0.12, "noise_shape": null, "seed": null}, "shared_object_id": 18, "build_input_shape": {"class_name": "TensorShape", "items": [null, 64]}}2
  root.layer-10"_tf_keras_layer*{"name": "activation_1", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Activation", "config": {"name": "activation_1", "trainable": true, "dtype": "float32", "activation": "relu"}, "shared_object_id": 19}2
 root.layer_with_weights-4"_tf_keras_layer*{"name": "dense_2", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "dtype": "float32", "units": 10, "activation": "linear", "use_bias": true, "kernel_initializer": {"class_name": "GlorotUniform", "config": {"seed": null}, "shared_object_id": 20}, "bias_initializer": {"class_name": "Zeros", "config": {}, "shared_object_id": 21}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}, "shared_object_id": 22, "input_spec": {"class_name": "InputSpec", "config": {"dtype": null, "shape": null, "ndim": null, "max_ndim": null, "min_ndim": 2, "axes": {"-1": 64}}, "shared_object_id": 33}, "build_input_shape": {"class_name": "TensorShape", "items": [null, 64]}}2
  root.layer-12"_tf_keras_layer*{"name": "activation_2", "trainable": true, "expects_training_arg": false, "dtype": "float32", "batch_input_shape": null, "stateful": false, "must_restore_from_config": false, "class_name": "Activation", "config": {"name": "activation_2", "trainable": true, "dtype": "float32", "activation": "softmax"}, "shared_object_id": 23}2
root.keras_api.metrics.0"_tf_keras_metric*{"class_name": "Mean", "name": "loss", "dtype": "float32", "config": {"name": "loss", "dtype": "float32"}, "shared_object_id": 34}2
root.keras_api.metrics.1"_tf_keras_metric*{"class_name": "MeanMetricWrapper", "name": "accuracy", "dtype": "float32", "config": {"name": "accuracy", "dtype": "float32", "fn": "categorical_accuracy"}, "shared_object_id": 26}2
\ No newline at end of file
......@@ -11,6 +11,7 @@ import datetime
import base64
import io
from preProcessor import transform_audio
from knowledgeIqScoreCalculation import calKnowledgeIqActivityScore
# Libraries required for model utilization
import cv2 as cv
import numpy as np
......@@ -20,6 +21,8 @@ from controler import captureImage
from keras.models import load_model
from sklearn.preprocessing import OneHotEncoder
import logging
# Attention classes
attClass = {0: 'Low Attention', 1: 'Mid Attention', 2: 'High Attention'}
......@@ -35,6 +38,8 @@ app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
logging.basicConfig(level=logging.INFO)
logging.getLogger('flask_cors').level = logging.DEBUG
# for prediction
......@@ -410,9 +415,9 @@ def mentalChromScores():
})
# Knowledge IQ evaluation
@app.route('/predictKnowledgeIq', methods = ['POST','GET'])
@cross_origin()
def predictKnowledgeIq():
@app.route('/predictColourNumbers', methods = ['POST','GET'])
@cross_origin(origins="localhost")
def predictColourNames():
if request.method == 'POST':
file = request.files.get('file')
questionIndex = request.form.get('questionIndex')
......@@ -428,9 +433,38 @@ def predictKnowledgeIq():
db.db['knowledgeIQScore'].insert_one({
"activityName": "Colour Numbers",
"questionIndex": questionIndex,
"transcription": prediction
})
return jsonify({'success: Data Added to the database successfully'})
except:
return jsonify({'error: Error during pipeline execution'})
return jsonify({'result: test'})
# Knowledge IQ evaluation - Digit Span activity
@app.route('/predictDigitSpan', methods=['POST', 'GET'])
@cross_origin()
def predictDigitSpan():
    """Accept an uploaded Digit Span audio recording, transcribe it, and
    persist the transcription to MongoDB.

    POST form fields:
        file          -- the recorded .wav audio blob.
        questionIndex -- index of the question being answered.

    Returns JSON: {'prediction': <transcription>} on success, otherwise an
    {'error': <message>} payload.
    """
    if request.method == 'POST':
        file = request.files.get('file')
        questionIndex = request.form.get('questionIndex')
        print(questionIndex)
        activity = "Digit Span"
        # score = calKnowledgeIqActivityScore(activity, questionIndex, prediction)
        if file is None or file.filename == "":
            # BUG FIX: original returned jsonify({'error: no file'}) - a *set*
            # literal, which is not JSON-serializable. Use a proper dict.
            return jsonify({'error': 'no file'})
        try:
            # NOTE(review): filename comes straight from the client; consider
            # werkzeug.utils.secure_filename before saving - TODO confirm.
            file.save("./" + file.filename)
            prediction = transform_audio(file.filename)
            db.db['knowledgeIQScore'].insert_one({
                "activityName": activity,
                "questionIndex": questionIndex,
                "transcription": prediction,
                # "accuracyScore": score[0],
                # "similarityScore": score[1]
            })
            # The second, unreachable `return jsonify({'success...})` from the
            # original has been removed.
            return jsonify({'prediction': prediction})
        except Exception:
            # Narrowed from a bare `except:`, which would also swallow
            # SystemExit / KeyboardInterrupt.
            return jsonify({'error': 'Error during pipeline execution'})
    return jsonify({'result': 'test'})
......
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -5,6 +5,7 @@ import {
Arithmetic,
ColourNumbers,
DigitSpan,
ColourNumbersController,
DigitSpanController,
VideoPlayerScreen,
} from "../index";
......@@ -96,7 +97,7 @@ const ActivityContainer = () => {
className="activity-container-navigator-item"
onClick={() => {
setCurrentActivityNo(6);
setIsGuideVideoClicked(true);
// setIsGuideVideoClicked(true);
changeColor();
}}
>
......@@ -107,7 +108,7 @@ const ActivityContainer = () => {
className="activity-container-navigator-item"
onClick={() => {
setCurrentActivityNo(7);
setIsGuideVideoClicked(true);
// setIsGuideVideoClicked(true);
changeColor();
}}
>
......@@ -138,7 +139,7 @@ const ActivityContainer = () => {
<PairCancerlation nextActivity={nextActivityHandler} />
)}
{currentActivityNo === 6 && (
<ColourNumbers nextActivity={nextActivityHandler} />
<ColourNumbersController nextActivity={nextActivityHandler} />
)}
{currentActivityNo === 7 && (
<DigitSpanController nextActivity={nextActivityHandler} />
......
......@@ -5,7 +5,7 @@ import Box from "@mui/material/Box";
import "./ColourNumbers.css";
import { lightBlue, yellow } from "@mui/material/colors";
import { createTheme, ThemeProvider, styled } from "@mui/material/styles";
import { RecordingHandler } from "../../reasoningIqEval/recorder/Recorder";
import { RecordingHandler } from "../recorder/Recorder";
import Timer from "../../reasoningIqEval/timer/Timer";
const Item = styled(Paper)(({ theme }) => ({
textAlign: "center",
......@@ -18,7 +18,7 @@ const Item = styled(Paper)(({ theme }) => ({
const ColourNumbers = ({ nextActivity }) => {
const ColourNumbers = ({ GoNext, isAllCompleted }) => {
const [allCompleted, setAllCompleted] = useState(false);
const [activityIndex, setActivityIndex] = useState(1);
......@@ -26,13 +26,15 @@ const switchActivityHandler = () => {
let activityNo = activityIndex + 1;
setActivityIndex(activityNo);
// switch question
console.log('switch');
console.log('switch');
};
useEffect(() => {
setTimeout(() => {
}, 5000);
console.log('rec');
RecordingHandler(`CN.wav`, activityIndex);
RecordingHandler(`CN.wav`, activityIndex, 'CN');
}, [activityIndex]);
return (
......@@ -66,10 +68,11 @@ useEffect(() => {
</Box>
</Grid>
</Grid>
{!allCompleted && <Timer switchActivity={switchActivityHandler} />}
</div>
);
};
export default ColourNumbers;
export default React.memo(ColourNumbers);
import React, { useState, useEffect } from "react";
import { ColourNumbers } from "../../..";
import VideoPlayerScreen from "./videoPlayer";
const ColourNumbersController = () => {
const activityCount = 4;
const [currentActivityNo, setCurrentActivityNo] = useState(1);
const [isAudioCompleted, setIsAudioCompleted] = useState(false);
const [isAllCompleted, setIsAllCompleted] = useState(false);
const nextActivityHandler = () => {
if (currentActivityNo < activityCount - 1) {
setCurrentActivityNo(currentActivityNo + 1);
} else {
setIsAllCompleted(true);
}
setIsAudioCompleted(false);
};
const StartRecording = (state) => {
setTimeout(() => {
setIsAudioCompleted(true);
}, 1000);
};
return (
<>
<div className="w-full h-full">
<div>
{!isAudioCompleted ? (
<VideoPlayerScreen
currentActivtyIndex={currentActivityNo}
changeScreen={StartRecording}
/>
) : (
<>
<ColourNumbers
GoNext={nextActivityHandler}
isAllCompleted={isAllCompleted}
currentActivityNo={currentActivityNo}
/>
</>
)}
</div>
</div>
</>
);
};
export default ColourNumbersController;
/* 3-row x 5-column grid that lays out the whole activity screen. */
.activity-container-angry-grid {
display: grid;
grid-template-rows: 1fr 1fr 1fr;
grid-template-columns: 1fr 1fr 1fr 1fr 1fr;
gap: 0px;
height: 100%;
padding: 20px;
}
/* First grid cell: spans all three rows of the leftmost column
   (the side navigator panel), with a light framed border. */
#activity-container-item-0 {
grid-row-start: 1;
grid-column-start: 1;
grid-row-end: 4;
grid-column-end: 2;
margin-right: 10px;
border: solid gainsboro;
border-radius: 5px;
padding: 5px;
}
\ No newline at end of file
import React, { useState, useEffect } from "react";
import toast, { Toaster } from "react-hot-toast";
import { ContainerCard } from "../../../../components/index";
import Video from "./videos";
const VideoPlayerScreen = ({ currentActivtyIndex, changeScreen }) => {
return (
<>
<div className="student-page-angry-grid">
<div id="student-page-item-0">
<ContainerCard>
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<div className="lg:text-center">
<h2 className="text-base text-[#3d59c1] font-semibold tracking-wide uppercase mt-4">
ICAAT
</h2>
</div>
<div className="mt-10 sm:mt-0">
<Video currentActivtyIndex={currentActivtyIndex} changeScreen={changeScreen} />
</div>
</div>
</ContainerCard>
</div>
</div>
</>
);
};
export default VideoPlayerScreen;
import React, { useState, useEffect, useRef } from "react";
import Vid4 from "../../../../assets/video/vid4.mp4";
const Video = ({ currentActivtyIndex, changeScreen }) => {
const [videoLink, setVideoLink] = useState("");
const vidRef = useRef();
useEffect(() => {
switch (currentActivtyIndex) {
case 1:
setVideoLink(Vid4);
break;
case 2:
setVideoLink(Vid4);
break;
case 3:
setVideoLink(Vid4);
break;
case 4:
setVideoLink(Vid4);
break;
case 5:
setVideoLink("/Videos/vid1.mp4");
break;
default:
}
const timer = setTimeout(() => {
vidRef.current.play();
}, 3000);
return () => clearTimeout(timer);
}, []);
const myCallback = () => {
changeScreen(false);
}
return (
<div className="flex items-center">
{videoLink && (
<>
<video ref={vidRef} className="w-full h-full p-2" controls onEnded={() => myCallback()}>
<source src={videoLink} type="video/mp4" />
</video>
</>
)}
</div>
);
};
export default Video;
......@@ -8,82 +8,22 @@ import VolumeUpIcon from "@mui/icons-material/VolumeUp";
import VolumeOffIcon from "@mui/icons-material/VolumeOff";
import Picture from "../../../../assets/digitspan.jpg";
import Button from "@mui/material/Button";
// import Instruction1 from '../../../../assets/audio/audio.aac';
// import Instruction2 from '../../../../assets/audio/audio2.aac';
import Timer from "../../reasoningIqEval/timer/Timer";
import { RecordingHandler } from "../recorder/Recorder";
import "./DigitSpan.css";
const DigitSpan = ({ GoNext, isAllCompleted }) => {
const [buttonClicked, setButtonClicked] = useState(1);
const [audio, setAudio] = useState("Instruction1");
const [allCompleted, setAllCompleted] = useState(false);
const DigitSpan = ({ GoNext, isAllCompleted, currentActivityNo }) => {
const [activityIndex, setActivityIndex] = useState(1);
// const switchActivityHandler = ({switchActivity}) => {
// let activityNo = activityIndex + 1;
// setActivityIndex(activityNo);
// // switch question
// console.log("switch");
// // if(activityNo < 4){
// // }
// // else{
// // setAllCompleted(true);
// // }
// };
// const audioFiles =[{source: Instruction1},
// {source: Instruction2}];
// const audioFiles2 =[Instruction1,Instruction2]
// const playAudio = () => {
// const audioPromise = this.audio.play()
// if (audioPromise !== undefined) {
// audioPromise
// .then(_ => {
// // autoplay started
// console.log('Audio Playing')
// })
// .catch(err => {
// // catch dom exception
// console.info(err)
// })
// }
// }
useEffect(() => {
console.log("rec");
RecordingHandler(`DS.wav`, activityIndex);
}, [activityIndex]);
// const switchAudio = () => {
// if (audio < audioFiles.length - 1) {
// setActivityIndex(activityIndex+1)
// this.setState({
// audio: audioFiles2[activityIndex]
// });
// //restart playlist
// } else {
// console.log('ERROR');
// }
// }
RecordingHandler(`DS.wav`, currentActivityNo, 'DS');
}, [currentActivityNo]);
// useEffect(() => {
// console.log('rec');
// RecordingHandler(`CN.wav`, activityIndex);
// }, [activityIndex]);
return (
<div className="container">
{/* <div class="flex flex-wrap justify-center">
<img
alt=""
class="max-w-sm h-auto shadow-lg"
src={Picture}
/>
</div> */}
{isAllCompleted && (
<div className="w-4/6 h-4/6 m-auto">
{" "}
......@@ -100,30 +40,6 @@ const DigitSpan = ({ GoNext, isAllCompleted }) => {
justifyContent="center"
alignItems="center"
>
{/* {buttonClicked === 1 && (
<Avatar sx={{ bgcolor: green[500], width: 100, height: 100}}
onClick={()=> {setButtonClicked(2)}}>
<MicSharpIcon sx={{ fontSize: 60 }}/>
</Avatar>
)}
{buttonClicked === 2 && (
<Avatar sx={{ bgcolor: red[500], width: 100, height: 100 }}
onClick={()=> {setButtonClicked(1)}}>
<MicOffSharpIcon sx={{ fontSize: 60 }}/>
</Avatar>
)} */}
{/* {buttonClicked === 1 && (
<Avatar sx={{ bgcolor: green[500], width: 100, height: 100}}
onClick={()=> {}}>
<VolumeUpIcon sx={{ fontSize: 60 }}/>
</Avatar>
)}
{buttonClicked === 2 && (
<Avatar sx={{ bgcolor: red[500], width: 100, height: 100 }}
onClick={()=> {setButtonClicked(1)}}>
<VolumeOffIcon sx={{ fontSize: 60 }}/>
</Avatar>
)} */}
{!isAllCompleted && <Timer switchActivity={GoNext} />}
</Stack>
{/* <Button
......
......@@ -39,6 +39,7 @@ const DigitSpanController = () => {
<DigitSpan
GoNext={nextActivityHandler}
isAllCompleted={isAllCompleted}
currentActivityNo={currentActivityNo}
/>
</>
)}
......
......@@ -3,11 +3,16 @@ import axios from "axios";
import baseURL from "../../../../config/api";
import API from "../../../../config/api";
export const RecordingHandler = async (fileName, questionIndex) => {
export const RecordingHandler = async (
fileName,
questionIndex,
activityName
) => {
let stream = await navigator.mediaDevices.getUserMedia({
video: false,
audio: true,
});
let recorder = new RecordRTC.StereoAudioRecorder(stream, {
type: "audio",
mimeType: "audio/wav",
......@@ -21,23 +26,28 @@ export const RecordingHandler = async (fileName, questionIndex) => {
await recorder.stop(function () {
let blob = recorder.blob;
processRecording(blob, fileName, questionIndex);
processRecording(blob, fileName, activityName, questionIndex);
});
stream.getTracks().forEach(function (track) {
track.stop();
});
};
const processRecording = (blob, fileName, questionIndex) => {
const processRecording = (blob, fileName, activityName, questionIndex) => {
let recordedFile = new File([blob], fileName);
uploadRecording(recordedFile, fileName, questionIndex);
uploadRecording(recordedFile, fileName, activityName, questionIndex);
};
const uploadRecording = async (file, fileName, questionIndex) => {
const uploadRecording = async (file, fileName, activityName, questionIndex) => {
let data = new FormData();
let candidateID = 0;
if (localStorage) {
candidateID = localStorage.getItem("candidateID");
}
console.log(questionIndex);
data.append("file", file, fileName);
data.append("questionIndex", questionIndex);
data.append("candidateID", candidateID);
const config = {
headers: {
......@@ -46,11 +56,32 @@ const uploadRecording = async (file, fileName, questionIndex) => {
},
};
await API.post(`predictKnowledgeIq`, data, config)
// if (activityName === 'DS'){
// await API.post(`predictDigitSpan`, data, config)
// .then((res) => {
// console.log(res, "DONE" + new Date().toISOString);
// })
// .catch((err) => {
// console.log(err, "ERROR" + new Date().toISOString);
// });
// }
// if (activityName === 'CN'){
// await API.post(`predictColourNumbers`, data, config)
// .then((res) => {
// console.log(res, "DONE" + new Date().toISOString);
// })
// .catch((err) => {
// console.log(err, "ERROR" + new Date().toISOString);
// });
// }
await API.post(`predictColourNumbers`, data, config)
.then((res) => {
console.log(res, "DONE" + new Date().toISOString);
})
.catch((err) => {
console.log(err, "ERROR" + new Date().toISOString);
});
};
......@@ -7,6 +7,7 @@ export { default as PictureConcept } from "./activities/reasoningIqEval/pictureC
export { default as Arithmetic } from "./activities/reasoningIqEval/arithmetic/Arithmetic";
export { default as ImageRow } from "./activities/reasoningIqEval/pictureConcept/ImageRow";
export { default as ColourNumbers } from "./activities/knowledgeIqEval/colourNumbers/ColourNumbers";
export { default as ColourNumbersController } from "./activities/knowledgeIqEval/colourNumbers/colourNumbersController";
export { default as DigitSpan } from "./activities/knowledgeIqEval/digitSpan/DigitSpan";
export { default as EthicalClearenceScreen } from "./activities/ethicalClearence";
export { default as VideoPlayerScreen } from "./activities/videoPlayer";
......
import React, { useState, useEffect, useRef } from "react";
import A1 from "../Audio/A1.mp3";
import A2 from "../Audio/A2.mp3";
import A1 from "../assets/Audio/DS1.mp3";
import A2 from "../assets/Audio/DS2.mp3";
import A3 from "../assets/Audio/DS3.mp3";
import Image from "../assets/digitspan.jpg";
const Audio = ({ currentActivtyIndex, changeScreen }) => {
const myAudio = useRef();
......@@ -16,14 +18,14 @@ const Audio = ({ currentActivtyIndex, changeScreen }) => {
setAudio(A2);
break;
case 3:
setAudio(A1);
break;
case 4:
setAudio(A2);
break;
case 5:
setAudio(A1);
setAudio(A3);
break;
// case 4:
// setAudio(A2);
// break;
// case 5:
// setAudio(A1);
// break;
default:
}
const timer = setTimeout(() => {
......@@ -54,9 +56,17 @@ const Audio = ({ currentActivtyIndex, changeScreen }) => {
onEnded={() => myCallback()}
/>
<button className="text-black" onClick={handleBeep}>
{/* <button className="text-black" onClick={handleBeep}>
Start
</button>
</button> */}
<img
className="items-center justify-center"
src={Image}
style={{width:'200px'}}
alt=""
/>
</>
)}
</div>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment