Change model architecture to an MLP

This commit is contained in:
coolneng 2021-07-06 01:44:58 +02:00
parent 1a1262b0b1
commit eabb7f0285
Signed by: coolneng
GPG Key ID: 9893DA236405AF57
2 changed files with 9 additions and 16 deletions

View File

@@ -6,8 +6,8 @@ class Hyperparameters:
train_dataset="data/train_data.tfrecords",
test_dataset="data/test_data.tfrecords",
eval_dataset="data/eval_data.tfrecords",
epochs=1000,
batch_size=256,
epochs=100,
batch_size=64,
learning_rate=0.004,
l2_rate=0.001,
log_directory="logs",

View File

@@ -17,23 +17,16 @@ def build_model(hyperparams) -> Model:
"""
model = Sequential(
[
Input(shape=(None, hyperparams.max_length, len(BASES))),
Input(shape=(hyperparams.batch_size, hyperparams.max_length, len(BASES))),
Masking(mask_value=-1),
Conv1D(
filters=16,
kernel_size=5,
activation="relu",
kernel_regularizer=l2(hyperparams.l2_rate),
Dense(
units=16, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
),
MaxPool1D(pool_size=3, strides=1),
Conv1D(
filters=16,
kernel_size=3,
activation="relu",
kernel_regularizer=l2(hyperparams.l2_rate),
Dropout(rate=0.3),
Dense(
units=16, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
),
MaxPool1D(pool_size=3, strides=1),
GlobalAveragePooling1D(),
Dropout(rate=0.3),
Dense(
units=16, activation="relu", kernel_regularizer=l2(hyperparams.l2_rate)
),