Commit 74a60172 authored by W.D.R.P. Sandeepa's avatar W.D.R.P. Sandeepa

Merge branch 'it18218640' into 'master'

implemented build_model function

See merge request !34
parents 5a5c75a3 c6817b52
@@ -4,4 +4,48 @@ import tensorflow.keras as keras
from sklearn.model_selection import train_test_split
DATA_PATH = "data.json"
SAVE_MODEL_PATH = "model.h5"
\ No newline at end of file
LEARNING_RATE = 0.0001
EPOCHS = 40
BATCH_SIZE = 32
NUM_KEYWORDS = 10
def build_model(input_shape, learning_rate, error="sparse_categorical_crossentropy"):

    # build network
    model = keras.Sequential()

    # conv layer 1
    model.add(keras.layers.Conv2D(64, (3, 3), activation="relu", input_shape=input_shape, kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((3, 3), strides=(2, 2), padding="same"))

    # conv layer 2
    model.add(keras.layers.Conv2D(32, (3, 3), activation="relu", kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((3, 3), strides=(2, 2), padding="same"))

    # conv layer 3
    model.add(keras.layers.Conv2D(32, (2, 2), activation="relu", kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((2, 2), strides=(2, 2), padding="same"))

    # flatten the output and feed it into a dense layer
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(64, activation="relu"))
    model.add(keras.layers.Dropout(0.3))

    # softmax classifier
    model.add(keras.layers.Dense(NUM_KEYWORDS, activation="softmax"))

    # compile the model
    optimiser = keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(optimizer=optimiser, loss=error, metrics=["accuracy"])

    # print model overview
    model.summary()

    return model
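
For reference, a minimal sketch of how build_model might be wired into a training script. This is not part of the merge request: the load_data helper, the "MFCCs"/"labels" JSON keys, and the 0.2 test split are assumptions about the rest of the pipeline.

import json
import numpy as np

def load_data(data_path):
    # assumed helper: the JSON layout ("MFCCs", "labels") is a guess, not part of this commit
    with open(data_path, "r") as fp:
        data = json.load(fp)
    X = np.array(data["MFCCs"])
    y = np.array(data["labels"])
    return X, y

def main():
    # load features and add a channel axis so they match the Conv2D input
    X, y = load_data(DATA_PATH)
    X = X[..., np.newaxis]
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)

    # build, train, and save the model using the constants defined above
    input_shape = (X.shape[1], X.shape[2], 1)
    model = build_model(input_shape, LEARNING_RATE)
    model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCH_SIZE,
              validation_data=(X_test, y_test))
    model.save(SAVE_MODEL_PATH)

if __name__ == "__main__":
    main()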