Commit c6817b52 authored by W.D.R.P. Sandeepa

implemented build_model function

parent 0b43ffd9
@@ -5,3 +5,47 @@ from sklearn.model_selection import train_test_split
DATA_PATH = "data.json"
SAVE_MODEL_PATH = "model.h5"
LEARNING_RATE = 0.0001
EPOCHS = 40
BATCH_SIZE = 32
NUM_KEYWORDS = 10


def build_model(input_shape, learning_rate, error="sparse_categorical_crossentropy"):
    # build network
    model = keras.Sequential()

    # conv layer 1
    model.add(keras.layers.Conv2D(64, (3, 3), activation="relu", input_shape=input_shape, kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((3, 3), strides=(2, 2), padding="same"))

    # conv layer 2
    model.add(keras.layers.Conv2D(32, (3, 3), activation="relu", kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((3, 3), strides=(2, 2), padding="same"))

    # conv layer 3
    model.add(keras.layers.Conv2D(32, (2, 2), activation="relu", kernel_regularizer=keras.regularizers.l2(0.001)))
    model.add(keras.layers.BatchNormalization())
    model.add(keras.layers.MaxPool2D((2, 2), strides=(2, 2), padding="same"))

    # flatten the output and feed it into a dense layer
    model.add(keras.layers.Flatten())
    model.add(keras.layers.Dense(64, activation="relu"))
    model.add(keras.layers.Dropout(0.3))

    # softmax classifier
    model.add(keras.layers.Dense(NUM_KEYWORDS, activation="softmax"))

    # compile the model
    optimiser = keras.optimizers.Adam(learning_rate=learning_rate)
    model.compile(optimizer=optimiser, loss=error, metrics=["accuracy"])

    # print model overview
    model.summary()

    return model
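

# A minimal usage sketch (not part of this commit), assuming the file imports
# keras via `from tensorflow import keras` above the shown hunk. The input
# shape here is a hypothetical placeholder for the MFCC feature dimensions
# (time frames, coefficients, channels); the real values come from the data.
if __name__ == "__main__":
    model = build_model(input_shape=(44, 13, 1), learning_rate=LEARNING_RATE)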