# models/CNN/lenet/gen_scripts/lenet_keras2.2.4.py

import numpy as np
import pandas as pd
from time import time
from sklearn.model_selection import train_test_split
import keras
import keras.layers as layers
from keras.preprocessing.image import ImageDataGenerator
# Training variables
EPOCHS = 30
BATCH_SIZE = 480
# Dataset preparation
def prepare_mnist_data(rows=28, cols=28, nb_classes=10, categorical=False, padding=True, debug=True):
    """Load and preprocess MNIST; return train/test/validation splits."""
    from keras.datasets import mnist
    from keras.utils import np_utils
    (X_train, y_train), (X_test, y_test) = mnist.load_data()
    X_train = X_train.reshape(X_train.shape[0], rows, cols, 1)
    X_test = X_test.reshape(X_test.shape[0], rows, cols, 1)
    X_train = X_train.astype('float32')
    X_test = X_test.astype('float32')
    # Scale pixel values to [0, 1]
    X_train /= 255
    X_test /= 255
    # Hold out 20% of the training data for validation
    X_train, X_validation, y_train, y_validation = train_test_split(X_train, y_train, test_size=0.2, random_state=0)
    if padding:
        # Pad 28x28 images with 0s to 32x32, the input size the network expects
        X_train = np.pad(X_train, ((0,0),(2,2),(2,2),(0,0)), 'constant')
        X_test = np.pad(X_test, ((0,0),(2,2),(2,2),(0,0)), 'constant')
        X_validation = np.pad(X_validation, ((0,0),(2,2),(2,2),(0,0)), 'constant')
    if categorical:
        # Convert class vectors to one-hot class matrices
        y_train = np_utils.to_categorical(y_train, nb_classes)
        y_test = np_utils.to_categorical(y_test, nb_classes)
        y_validation = np_utils.to_categorical(y_validation, nb_classes)
    if debug:
        print('X_train shape:', X_train.shape)
        print(X_train.shape[0], 'train samples')
        print(X_test.shape[0], 'test samples')
        print(X_validation.shape[0], 'validation samples')
        if not categorical:
            train_labels_count = np.unique(y_train, return_counts=True)
            dataframe_train_labels = pd.DataFrame({'Label': train_labels_count[0], 'Count': train_labels_count[1]})
            print(dataframe_train_labels)
    return X_train, X_test, X_validation, y_train, y_test, y_validation
# LeNet model definition
def LeNet():
    # LeNet-style CNN: two conv + average-pooling stages followed by three dense layers
    model = keras.Sequential()
    model.add(layers.Conv2D(filters=6, kernel_size=(3, 3), activation='relu', input_shape=(32, 32, 1)))
    model.add(layers.AveragePooling2D())
    model.add(layers.Conv2D(filters=16, kernel_size=(3, 3), activation='relu'))
    model.add(layers.AveragePooling2D())
    model.add(layers.Flatten())
    model.add(layers.Dense(units=120, activation='relu'))
    model.add(layers.Dense(units=84, activation='relu'))
    model.add(layers.Dense(units=10, activation='softmax'))
    return model
# Get the dataset with one-hot labels
X_train, X_test, X_validation, y_train, y_test, y_validation = prepare_mnist_data(categorical=True, debug=True)
# Build the model and inspect its architecture
model = LeNet()
model.summary()
# Compile with categorical cross-entropy loss and the Adam optimizer
model.compile(loss=keras.losses.categorical_crossentropy, optimizer=keras.optimizers.Adam(), metrics=['accuracy'])
# Batch generators for training and validation (no augmentation applied)
train_generator = ImageDataGenerator().flow(X_train, y_train, batch_size=BATCH_SIZE)
validation_generator = ImageDataGenerator().flow(X_validation, y_validation, batch_size=BATCH_SIZE)
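# Optional sketch (not in the original script): ImageDataGenerator also supports light
# augmentation; the parameter values below are illustrative assumptions, not tuned settings.
# augmented_datagen = ImageDataGenerator(rotation_range=10, width_shift_range=0.1,
#                                        height_shift_range=0.1, zoom_range=0.1)
# train_generator = augmented_datagen.flow(X_train, y_train, batch_size=BATCH_SIZE)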
# Training
from keras.callbacks import TensorBoard
steps_per_epoch = X_train.shape[0] // BATCH_SIZE
validation_steps = X_validation.shape[0] // BATCH_SIZE
# Log training curves to a timestamped TensorBoard directory
tensorboard = TensorBoard(log_dir="logs/{}".format(time()))
model.fit_generator(train_generator, steps_per_epoch=steps_per_epoch, epochs=EPOCHS,
                    validation_data=validation_generator, validation_steps=validation_steps,
                    shuffle=True, callbacks=[tensorboard])
# Save the trained model in Keras HDF5 format (.h5)
model.save('lenet_keras_v2.2.4.h5')
print(">>> Model saved!")