Commit 227141d8 authored by Ranathunge R. A. D. O

Added app.py

parent 412624df
# app.py: Flask app serving stress and emotion detection from an uploaded audio clip
from flask import Flask, request, render_template

import joblib
import librosa
import numpy as np
import tensorflow as tf
from tensorflow.keras.initializers import glorot_uniform

# Scaler fitted during training; applied to the MFCC features before prediction
stress_scaler = joblib.load('scaler.sav')

# Number of seconds of audio used for feature extraction
input_duration = 3

app = Flask(__name__)


@app.route('/')
def index():
    # Landing page with the upload / detection UI
    return render_template('stress.html')


@app.route("/upload", methods=["POST", "GET"])
def default():
    # Save the uploaded file to a fixed path; the detection routes read it from here
    f = request.files['file']
    filePath = "uploads/audio.wav"
    f.save(filePath)
    return render_template('stress.html', upload_status="Uploaded")

@app.route('/detection', methods=["GET", "POST"])
def detection():
    # Binary stress classifier
    labels = ["Not Stress", "Stress"]
    loaded_model = tf.keras.models.load_model(
        "models/Data_noiseNshift.h5",
        custom_objects={'GlorotUniform': glorot_uniform()})
    print("Loaded model from disk")

    # Load the previously uploaded clip and compute MFCCs averaged over the coefficient axis
    X, sample_rate = librosa.load("uploads/audio.wav", res_type='kaiser_fast',
                                  duration=input_duration, sr=22050 * 2, offset=0.5)
    mfccs = np.mean(librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=13), axis=0)

    # Scale and reshape to (samples, timesteps, 1) as expected by the trained model
    data = np.array([mfccs])
    data = stress_scaler.transform(data)
    data = data.reshape(data.shape[0], data.shape[1], 1)

    result = loaded_model.predict(data)
    print(result)
    category = np.argmax(result, axis=1)[0]
    acc = round(np.max(result, axis=1)[0] * 100)
    label = labels[category]
    print(' label:', label, ' accuracy:', acc)
    return render_template('stress_table.html', label=label, acc=acc)

@app.route('/emotion_detection', methods=["GET", "POST"])
def emotion_detection():
    # Six-class emotion classifier; note that the stress scaler is reused here
    labels = ["Neutral", "Calm", "Happy", "Sad", "Angry", "Fearful"]
    loaded_model = tf.keras.models.load_model(
        "models/Data_noiseNshift_e.h5",
        custom_objects={'GlorotUniform': glorot_uniform()})
    print("Loaded model from disk")

    # Same feature pipeline as /detection
    X, sample_rate = librosa.load("uploads/audio.wav", res_type='kaiser_fast',
                                  duration=input_duration, sr=22050 * 2, offset=0.5)
    mfccs = np.mean(librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=13), axis=0)

    data = np.array([mfccs])
    data = stress_scaler.transform(data)
    data = data.reshape(data.shape[0], data.shape[1], 1)

    result = loaded_model.predict(data)
    print(result)
    category = np.argmax(result, axis=1)[0]
    acc = round(np.max(result, axis=1)[0] * 100)
    label = labels[category]
    print(' label:', label, ' accuracy:', acc)
    return render_template('stress_table.html', label=label, acc=acc)

# run server
if __name__ == "__main__":
    app.run(
        host="0.0.0.0",
        port=5000,
        debug=True,
    )
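# Example client usage (a sketch only, not part of the app): assuming the server
# is running locally on port 5000 and "sample.wav" is a short test clip, the
# endpoints can be exercised with the requests library, e.g.:
#
#   import requests
#   with open("sample.wav", "rb") as fh:
#       requests.post("http://localhost:5000/upload", files={"file": fh})
#   print(requests.post("http://localhost:5000/detection").text)
#   print(requests.post("http://localhost:5000/emotion_detection").text)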