Commit 68ccb731 authored by W.H.M Kasun Sampath's avatar W.H.M Kasun Sampath

Initial Push commit

parent 4a8576e2
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="f0bdacf7-698f-4afd-a62d-fc48c233d8c2" name="Default Changelist" comment="" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="FileTemplateManagerImpl">
<option name="RECENT_TEMPLATES">
<list>
<option value="Python Script" />
</list>
</option>
</component>
<component name="ProjectId" id="1uhlLxov2xRxoYAaK9BaFFkGEtl" />
<component name="ProjectViewState">
<option name="hideEmptyMiddlePackages" value="true" />
<option name="showLibraryContents" value="true" />
</component>
<component name="PropertiesComponent">
<property name="RunOnceActivity.OpenProjectViewOnStart" value="true" />
<property name="RunOnceActivity.ShowReadmeOnStart" value="true" />
<property name="WebServerToolWindowFactoryState" value="false" />
<property name="last_opened_file_path" value="$PROJECT_DIR$" />
<property name="settings.editor.selected.configurable" value="editor.preferences.fonts.default" />
</component>
<component name="RunManager" selected="Python.train">
<configuration default="true" type="PythonConfigurationType" factoryName="Python">
<module name="CNN" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="" />
<option name="IS_MODULE_SDK" value="false" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="true" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="prepare_dataset" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="CNN" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/prepare_dataset.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="test" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="CNN" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/test.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="true" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<configuration name="train" type="PythonConfigurationType" factoryName="Python" temporary="true" nameIsGenerated="true">
<module name="CNN" />
<option name="INTERPRETER_OPTIONS" value="" />
<option name="PARENT_ENVS" value="true" />
<envs>
<env name="PYTHONUNBUFFERED" value="1" />
</envs>
<option name="SDK_HOME" value="" />
<option name="WORKING_DIRECTORY" value="$PROJECT_DIR$" />
<option name="IS_MODULE_SDK" value="true" />
<option name="ADD_CONTENT_ROOTS" value="true" />
<option name="ADD_SOURCE_ROOTS" value="true" />
<EXTENSION ID="PythonCoverageRunConfigurationExtension" runner="coverage.py" />
<option name="SCRIPT_NAME" value="$PROJECT_DIR$/train.py" />
<option name="PARAMETERS" value="" />
<option name="SHOW_COMMAND_LINE" value="false" />
<option name="EMULATE_TERMINAL" value="false" />
<option name="MODULE_MODE" value="false" />
<option name="REDIRECT_INPUT" value="false" />
<option name="INPUT_FILE" value="" />
<method v="2" />
</configuration>
<recent_temporary>
<list>
<item itemvalue="Python.train" />
<item itemvalue="Python.prepare_dataset" />
<item itemvalue="Python.test" />
</list>
</recent_temporary>
</component>
<component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="f0bdacf7-698f-4afd-a62d-fc48c233d8c2" name="Default Changelist" comment="" />
<created>1625125649454</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1625125649454</updated>
<workItem from="1625125650607" duration="2786000" />
<workItem from="1625136350571" duration="2538000" />
<workItem from="1625199940365" duration="2484000" />
<workItem from="1625215533135" duration="3702000" />
<workItem from="1625225454923" duration="822000" />
<workItem from="1625227158105" duration="3413000" />
<workItem from="1625233722169" duration="2141000" />
<workItem from="1625235914364" duration="2053000" />
<workItem from="1625238978838" duration="2373000" />
<workItem from="1625277592206" duration="880000" />
<workItem from="1625279974099" duration="940000" />
<workItem from="1625282581731" duration="5954000" />
<workItem from="1625290235704" duration="6379000" />
<workItem from="1625366292780" duration="1145000" />
<workItem from="1625375024100" duration="7289000" />
</task>
<servers />
</component>
<component name="TypeScriptGeneratedFilesManager">
<option name="version" value="3" />
</component>
<component name="com.intellij.coverage.CoverageDataManagerImpl">
<SUITE FILE_PATH="coverage/CNN$prepare_dataset.coverage" NAME="prepare_dataset Coverage Results" MODIFIED="1625290254578" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
<SUITE FILE_PATH="coverage/CNN$train.coverage" NAME="train Coverage Results" MODIFIED="1625397203062" SOURCE_PROVIDER="com.intellij.coverage.DefaultCoverageFileProvider" RUNNER="coverage.py" COVERAGE_BY_TEST_ENABLED="true" COVERAGE_TRACING_ENABLED="false" WORKING_DIRECTORY="$PROJECT_DIR$" />
</component>
</project>
\ No newline at end of file
{
"mapping": [],
"labels": [],
"MFCCs": [],
"files": []
}
\ No newline at end of file
################################################################################
# This .gitignore file was automatically created by Microsoft(R) Visual Studio.
################################################################################
.idea
\ No newline at end of file
File added
import librosa
import os
import json
DATASET_PATH = "dataset"
JSON_PATH = "data.json"
SAMPLES_TO_CONSIDER = 22050 # 1 sec. of audio
def preprocess_dataset(dataset_path, json_path, num_mfcc=13, n_fft=2048, hop_length=512):
    """Extract MFCCs from every audio file under dataset_path and save them as JSON.

    Each sub-directory of dataset_path is treated as one class; the directory
    name becomes the label in "mapping". Audio shorter than SAMPLES_TO_CONSIDER
    samples is skipped; longer audio is truncated to that length.

    :param dataset_path: root directory containing one sub-directory per class
    :param json_path: output path for the generated JSON file
    :param num_mfcc: number of MFCC coefficients to extract per frame
    :param n_fft: FFT window size in samples
    :param hop_length: hop between successive FFT windows in samples
    """
    # dictionary where we'll store mapping, labels, MFCCs and filenames
    data = {
        "mapping": [],
        "labels": [],
        "MFCCs": [],
        "files": []
    }

    # loop through all sub-dirs
    for i, (dirpath, dirnames, filenames) in enumerate(os.walk(dataset_path)):

        # ensure we're at sub-folder level.
        # BUG FIX: the original used `is not`, an object-identity check that can
        # be True even for equal path strings; compare normalized paths instead.
        if os.path.normpath(dirpath) != os.path.normpath(dataset_path):

            # save label (i.e., sub-folder name) in the mapping.
            # os.path.basename is portable, unlike splitting on "/" (fails on Windows).
            label = os.path.basename(dirpath)
            data["mapping"].append(label)
            print("\nProcessing: '{}'".format(label))

            # process all audio files in sub-dir and store MFCCs
            for f in filenames:
                file_path = os.path.join(dirpath, f)

                # load audio file and slice it to ensure length consistency among different files
                signal, sample_rate = librosa.load(file_path)

                # drop audio files with less than pre-decided number of samples
                if len(signal) >= SAMPLES_TO_CONSIDER:

                    # ensure consistency of the length of the signal
                    signal = signal[:SAMPLES_TO_CONSIDER]

                    # extract MFCCs. Keyword arguments are used because positional
                    # y/sr were deprecated and then removed in librosa >= 0.10.
                    MFCCs = librosa.feature.mfcc(y=signal, sr=sample_rate, n_mfcc=num_mfcc,
                                                 n_fft=n_fft, hop_length=hop_length)

                    # store data for analysed track; label index is i-1 because
                    # os.walk visits the root directory first (index 0)
                    data["MFCCs"].append(MFCCs.T.tolist())
                    data["labels"].append(i-1)
                    data["files"].append(file_path)
                    print("{}: {}".format(file_path, i-1))

    # save data in json file
    with open(json_path, "w") as fp:
        json.dump(data, fp, indent=4)


if __name__ == "__main__":
    preprocess_dataset(DATASET_PATH, JSON_PATH)
\ No newline at end of file
import json
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
DATA_PATH = "data.json"
SAVED_MODEL_PATH = "model.h5"
EPOCHS = 40
BATCH_SIZE = 32
PATIENCE = 5
LEARNING_RATE = 0.0001
def load_data(data_path):
    """Read MFCC features and labels from the JSON file written by the dataset prep step.

    :param data_path: path to the JSON file containing "MFCCs" and "labels" entries
    :return: tuple (X, y) of numpy arrays — features and integer labels
    """
    with open(data_path, "r") as json_file:
        dataset = json.load(json_file)

    features = np.array(dataset["MFCCs"])
    targets = np.array(dataset["labels"])
    print("Training sets loaded!")
    return features, targets
def prepare_dataset(data_path, test_size=0.2, validation_size=0.2):
    """Load the dataset and split it into train / validation / test sets.

    :param data_path: path to JSON file containing MFCCs and labels
    :param test_size: fraction of the whole set reserved for the test split
    :param validation_size: fraction of the remaining training set used for validation
    :return: (X_train, y_train, X_validation, y_validation, X_test, y_test); each X
             has a trailing channel axis added so it matches Conv2D's expected input
    """
    # load dataset
    # BUG FIX: removed leftover debug `print(X)` that dumped the entire feature
    # array to stdout on every run.
    X, y = load_data(data_path)

    # create train, validation, test split
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=test_size)
    X_train, X_validation, y_train, y_validation = train_test_split(X_train, y_train, test_size=validation_size)

    # add a channel axis to each nd array (rows, cols) -> (rows, cols, 1)
    X_train = X_train[..., np.newaxis]
    X_test = X_test[..., np.newaxis]
    X_validation = X_validation[..., np.newaxis]

    return X_train, y_train, X_validation, y_validation, X_test, y_test
def build_model(input_shape, loss="sparse_categorical_crossentropy", learning_rate=0.0001, num_classes=10):
    """Build and compile a CNN classifier for MFCC inputs.

    :param input_shape: shape of one sample, e.g. (time_frames, num_mfcc, 1)
    :param loss: loss function name passed to model.compile
    :param learning_rate: Adam optimizer learning rate
    :param num_classes: number of softmax output units (default 10 keeps the
                        original behavior; now parameterized for reuse)
    :return: compiled tf.keras Sequential model
    """
    # build network architecture using convolutional layers
    model = tf.keras.models.Sequential()

    # 1st conv layer
    model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='relu', input_shape=input_shape,
                                     kernel_regularizer=tf.keras.regularizers.l2(0.001)))
    model.add(tf.keras.layers.BatchNormalization())
    model.add(tf.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same'))

    # 2nd conv layer
    model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='relu',
                                     kernel_regularizer=tf.keras.regularizers.l2(0.001)))
    model.add(tf.keras.layers.BatchNormalization())
    model.add(tf.keras.layers.MaxPooling2D((3, 3), strides=(2, 2), padding='same'))

    # 3rd conv layer
    model.add(tf.keras.layers.Conv2D(32, (2, 2), activation='relu',
                                     kernel_regularizer=tf.keras.regularizers.l2(0.001)))
    model.add(tf.keras.layers.BatchNormalization())
    model.add(tf.keras.layers.MaxPooling2D((2, 2), strides=(2, 2), padding='same'))

    # flatten output and feed into dense layer
    model.add(tf.keras.layers.Flatten())
    model.add(tf.keras.layers.Dense(64, activation='relu'))
    # BUG FIX: the Dropout layer was instantiated but never passed to model.add,
    # so it had no effect; add it so it actually regularizes the dense layer.
    model.add(tf.keras.layers.Dropout(0.3))

    # softmax output layer
    model.add(tf.keras.layers.Dense(num_classes, activation='softmax'))

    optimiser = tf.optimizers.Adam(learning_rate=learning_rate)

    # compile model
    model.compile(optimizer=optimiser,
                  loss=loss,
                  metrics=["accuracy"])

    # print model parameters on console
    model.summary()

    return model
def train(model, epochs, batch_size, patience, X_train, y_train, X_validation, y_validation):
    """Fit the model with early stopping on the validation metric.

    :param model: compiled tf.keras model
    :param epochs: maximum number of training epochs
    :param batch_size: number of samples per gradient update
    :param patience: epochs without improvement before training stops
    :param X_train: training inputs
    :param y_train: training labels
    :param X_validation: validation inputs
    :param y_validation: validation labels
    :return: Keras History object returned by model.fit
    """
    # BUG FIX: monitor validation accuracy rather than training accuracy —
    # stopping on the training metric defeats early stopping's purpose of
    # halting when generalization stops improving.
    earlystop_callback = tf.keras.callbacks.EarlyStopping(monitor="val_accuracy",
                                                          min_delta=0.001,
                                                          patience=patience)

    # train model
    history = model.fit(X_train,
                        y_train,
                        epochs=epochs,
                        batch_size=batch_size,
                        validation_data=(X_validation, y_validation),
                        callbacks=[earlystop_callback])
    return history
def plot_history(history):
    """Plot training/validation accuracy and loss as a function of epoch.

    :param history: Keras History object returned by model.fit
    """
    figure, (acc_axis, loss_axis) = plt.subplots(2)

    # accuracy subplot: training and validation curves
    for metric in ("accuracy", "val_accuracy"):
        acc_axis.plot(history.history[metric], label=metric)
    acc_axis.set_ylabel("Accuracy")
    acc_axis.legend(loc="lower right")
    acc_axis.set_title("Accuracy evaluation")

    # loss subplot: training and validation curves
    for metric in ("loss", "val_loss"):
        loss_axis.plot(history.history[metric], label=metric)
    loss_axis.set_xlabel("Epoch")
    loss_axis.set_ylabel("Loss")
    loss_axis.legend(loc="upper right")
    loss_axis.set_title("Loss evaluation")

    plt.show()
def main():
    """Run the full pipeline: split data, build the CNN, train, evaluate, save."""
    # generate train, validation and test sets
    train_x, train_y, val_x, val_y, test_x, test_y = prepare_dataset(DATA_PATH)

    # create network; one sample is (time frames, MFCC coefficients, 1 channel)
    shape = (train_x.shape[1], train_x.shape[2], 1)
    network = build_model(shape, learning_rate=LEARNING_RATE)

    # train network
    fit_history = train(network, EPOCHS, BATCH_SIZE, PATIENCE, train_x, train_y, val_x, val_y)

    # plot accuracy/loss for training/validation set as a function of the epochs
    plot_history(fit_history)

    # evaluate network on test set
    loss_value, accuracy_value = network.evaluate(test_x, test_y)
    print("\nTest loss: {}, test accuracy: {}".format(loss_value, 100 * accuracy_value))

    # save model
    network.save(SAVED_MODEL_PATH)


if __name__ == "__main__":
    main()
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment