# ai_and_crafts/simple_hyperparam_tuning_01.py

import logging
from pathlib import Path

import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from scipy.optimize import minimize

__author__ = 'Luis Mata'

def objective(x):
    '''
    Beale's function to be minimized.
    '''
    x, y = x[0], x[1]
    return ((1.5 - x + x * y)**2 + (2.25 - x + x * y**2)**2 +
            (2.625 - x + x * y**3)**2)
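
# For reference: Beale's function has its global minimum at (3, 0.5), where it
# evaluates to exactly 0 -- a handy sanity check for the optimizers below:
#   objective([3.0, 0.5])  # -> 0.0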


def get_mnist():
    # Load the raw MNIST split.
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    # Normalize pixel values to [0, 1].
    x_train, x_test = x_train / 255.0, x_test / 255.0
    # Flatten each 28x28 image into a 784-dimensional vector.
    x_train = x_train.reshape(60000, 784)
    x_test = x_test.reshape(10000, 784)
    # Convert class vectors to one-hot categorical encoding.
    y_train = tf.keras.utils.to_categorical(y_train, 10)
    y_test = tf.keras.utils.to_categorical(y_test, 10)
    return {
        'x_train': x_train,
        'x_test': x_test,
        'y_train': y_train,
        'y_test': y_test
    }
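
# Shapes after preprocessing (standard MNIST split, for quick reference):
#   x_train: (60000, 784)   y_train: (60000, 10)
#   x_test:  (10000, 784)   y_test:  (10000, 10)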


def build(input_dim, num_classes):
    # TODO: if feeling fancy add a regularizer, left as an exercise for the
    # doom guys.
    return tf.keras.Sequential(
        name='exponential_decay_test',
        layers=[
            tf.keras.layers.Dense(64,
                                  activation='relu',
                                  kernel_initializer='uniform',
                                  input_dim=input_dim),
            tf.keras.layers.Dropout(0.1),
            tf.keras.layers.Dense(64,
                                  activation='relu',
                                  kernel_initializer='uniform'),
            tf.keras.layers.Dense(num_classes,
                                  activation='softmax',
                                  kernel_initializer='uniform')
        ])
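
# A minimal usage sketch (numbers assume the MNIST dimensions used in main):
#   model = build(input_dim=784, num_classes=10)
#   model.summary()  # 784-64-64-10 stack, ~55k trainable parameters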


def plot_history(history,
                 title='metrics',
                 zoom=1,
                 path=Path('./training_history.png')):
    plt.style.use('dark_background')
    fig = plt.figure(figsize=(16 * zoom, 8 * zoom))
    fig.suptitle(title)
    # Summarize history for accuracy.
    fig.add_subplot(1, 2, 1)
    plt.plot(history.history['accuracy'])
    plt.plot(history.history['val_accuracy'])
    plt.title('model accuracy')
    plt.ylabel('accuracy')
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    # Summarize history for loss.
    fig.add_subplot(1, 2, 2)
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.title('model loss')
    plt.ylabel('loss')
    plt.xlabel('epoch')
    plt.legend(['train', 'test'], loc='upper left')
    # Save the png.
    fig.savefig(path)
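
# Typical call, assuming a Keras History object returned by model.fit:
#   plot_history(history, title='mnist sgd + exponential decay', zoom=1)
# The 'test' legend label reflects that validation runs on MNIST's test split.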


def main():
    # Configure logging so the info messages below are actually emitted.
    logging.basicConfig(level=logging.INFO)
    # Fix random seed for reproducibility.
    np.random.seed(5)
    # Function boundaries.
    x_min, x_max, x_step = -4.5, 4.5, .9
    y_min, y_max, y_step = -4.5, 4.5, .9
    bnds = ((x_min, x_max), (y_min, y_max))
    # Create grid points (currently unused; handy for plotting the surface).
    x1, y1 = np.meshgrid(np.arange(x_min, x_max + x_step, x_step),
                         np.arange(y_min, y_max + y_step, y_step))
    # Initial (deliberately terrible) guess for the minimum.
    x0 = [4., 4.]
    f0 = objective(x0)
    logging.info(f'f({x0}) = {f0}')
    # Find the actual minimum.
    minimum = minimize(objective, x0, bounds=bnds)
    logging.info(
        f'Minimum value for the function computed using scipy:\n{minimum}')
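    # With bounds given, scipy defaults to L-BFGS-B here; it should land near
    # the global minimum (3, 0.5) with fun ~ 0, though Beale's nearly flat
    # valleys can stall gradient-based methods depending on the start point.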
    # Optimization using a NN.
    epochs = 60
    learning_rate = 0.1  # initial value
    decay_rate = 0.1
    momentum = 0.8
    # Define the optimizer; the exponential learning-rate decay itself is
    # applied per epoch by the LearningRateScheduler callback below.
    sgd = tf.keras.optimizers.SGD(learning_rate=learning_rate,
                                  momentum=momentum,
                                  nesterov=False)
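    # For reference, Keras SGD with nesterov=False applies the classic
    # momentum update:
    #   velocity = momentum * velocity - learning_rate * gradient
    #   weight   = weight + velocity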
    # Data preprocessing.  # TODO: use own libraries
    data = get_mnist()
    input_dim = data['x_train'].shape[1]
    batch_size = input_dim // 100  # == 7 for the 784-dimensional inputs
    # Build the model.
    model = build(input_dim, 10)
    # Compile the model.
    model.compile(loss='categorical_crossentropy',
                  optimizer=sgd,
                  metrics=['accuracy'])

    # Learning rate change: exponential decay per epoch.
    def exp_decay(epoch):
        return learning_rate * np.exp(-decay_rate * epoch)
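
    # With learning_rate=0.1 and decay_rate=0.1 this schedule gives, e.g.:
    #   epoch 0 -> 0.1, epoch 10 -> ~0.037, epoch 59 -> ~0.00027.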
    lr_rate = tf.keras.callbacks.LearningRateScheduler(exp_decay)
    # Callbacks (fit attaches a History callback automatically; listing it
    # here is harmless and keeps the intent explicit).
    callbacks_list = [tf.keras.callbacks.History(), lr_rate]
    # Fit.
    history = model.fit(data['x_train'],
                        data['y_train'],
                        batch_size=batch_size,
                        epochs=epochs,
                        callbacks=callbacks_list,
                        verbose=1,
                        validation_data=(data['x_test'], data['y_test']))
    plot_history(history)


if __name__ == '__main__':
    main()