Run LSTM tests
OrestisAlpos committed Apr 27, 2017
1 parent e8f9303 commit 32f5c42
Showing 22 changed files with 344 additions and 6 deletions.
129 changes: 129 additions & 0 deletions LSTMHandler.py
@@ -0,0 +1,129 @@
import keras.utils
from keras.models import Sequential, model_from_json, load_model
from keras.layers import Dense, Activation, SimpleRNN, LSTM
from keras.utils.vis_utils import plot_model
import keras.utils.np_utils
from keras.utils.np_utils import to_categorical

import os
import numpy as np
import datetime
import random
from reader import Reader
from sklearn import metrics
import matplotlib.pyplot as plt

def MyMetrics(y_true, y_pred):
    # Binarize predictions and labels at the 0.5 threshold.
    y_pred[y_pred < 0.5] = 0.0
    y_pred[y_pred >= 0.5] = 1.0
    y_true[y_true < 0.5] = 0.0
    y_true[y_true >= 0.5] = 1.0

    # If either vector is all ones or all zeros, flip one entry so the
    # confusion matrix is always 2x2 and the indexing below stays valid.
    if np.count_nonzero(y_pred == 1.0) == y_pred.shape[0]:
        y_pred[0] = 0.0
    if np.count_nonzero(y_pred == 0.0) == y_pred.shape[0]:
        y_pred[0] = 1.0
    if np.count_nonzero(y_true == 1.0) == y_true.shape[0]:
        y_true[0] = 0.0
    if np.count_nonzero(y_true == 0.0) == y_true.shape[0]:
        y_true[0] = 1.0
    confusion = metrics.confusion_matrix(y_true, y_pred)
    TP = confusion[1, 1]
    TN = confusion[0, 0]
    FP = confusion[0, 1]
    FN = confusion[1, 0]
    precision = TP / (TP + FP)
    recall = TP / (TP + FN)
    fscore = 2 * precision * recall / (precision + recall)
    return (precision, recall, fscore)


class LSTMHandler:

    results_directory = './LSTMresults'
    models_directory = './LSTMmodels'

    def __init__(self, model_name, num_categories, loss, optimizer):
        # GET THE MODEL: load the architecture saved as JSON by the LSTM_*.py scripts and compile it.
        fp_model = open(os.path.join(self.models_directory, model_name + '.json'), 'r')
        model_str = fp_model.read()
        self.model = model_from_json(model_str)
        self.model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
        fp_model.close()

        self.model_name = model_name
        self.num_categories = num_categories
        self.loss = loss
        self.optimizer = optimizer

    def fit_and_eval(self, x_train, y_train, x_test, y_test, nb_epoch, dataset_name):
        # batch_size is always 1 and shuffle is always False, so we don't pass them as parameters.
        self.results_file = os.path.join(self.results_directory, dataset_name + '.' + self.model_name)
        self.write_result(self.model_name + ' ' + dataset_name + ' Loss:' + self.loss + ' Optimizer:' + self.optimizer + ' Dropout:No')
        self.write_result('Epoch|Loss|Accuracy|Precision|Recall|Fscore')
        res_loss = []
        res_accuracy = []
        res_precision = []
        res_recall = []
        res_fscore = []
        # Reshape to (nb_samples, timesteps=1, nb_features), as the stateful LSTMs expect.
        x_train = x_train.reshape(x_train.shape[0], 1, -1)
        x_test = x_test.reshape(x_test.shape[0], 1, -1)
        if self.num_categories > 2:
            y_train = to_categorical(y_train, self.num_categories)
            y_test = to_categorical(y_test, self.num_categories)
        for i in range(1, nb_epoch + 1):
            self.model.fit(x_train, y_train, batch_size=1, epochs=1, shuffle=False)
            self.model.reset_states()
            (loss, accuracy) = self.model.evaluate(x_test, y_test, batch_size=1)
            self.model.reset_states()
            res_loss.append(loss)
            res_accuracy.append(accuracy)
            (precision, recall, fscore) = (0, 0, 0)
            if self.num_categories == 2:
                y_pred = self.model.predict(x_test, batch_size=1)
                self.model.reset_states()
                (precision, recall, fscore) = MyMetrics(y_test, y_pred)
            res_precision.append(precision)
            res_recall.append(recall)
            res_fscore.append(fscore)
            self.write_result(str(i) + '|' + str(loss) + '|' + str(accuracy) + '|' + str(precision) + '|' + str(recall) + '|' + str(fscore))

        return (res_loss, res_accuracy, res_precision, res_recall, res_fscore)

    def write_result(self, text):
        fp = open(self.results_file, 'a')
        fp.write(text + '\n')
        fp.close()

    # def save_weights():



    @staticmethod
    def plot_results(title, metric, results):
        lns = []
        for k in results.keys():
            result = results[k]
            myplot = plt.subplot()
            myplot.grid(True)
            myplot.set_xlabel("Epoch Number")
            myplot.set_ylabel(metric)
            x_Axis = np.arange(1, len(result) + 1)
            #myplot.xaxis.set_ticks(x_Axis)#np.arange( 1, len(x_Axis)+1, 1))
            #myplot.set_xticklabels(x_Axis, rotation=0)
            tokens = k.split('|')
            loss = tokens[0]
            if loss == 'categorical_crossentropy' or loss == 'binary_crossentropy':
                loss = 'crossentropy'
            optimizer = tokens[1]
            line = myplot.plot(x_Axis, result, label='loss:' + loss + ' opt:' + optimizer)
            lns = lns + line
        box = myplot.get_position()
        myplot.set_position([box.x0, box.y0 + box.height * 0.25, box.width, box.height * 0.75])
        labs = [l.get_label() for l in lns]
        plt.title(title)
        lgd = plt.legend(lns, labs, loc='upper center', bbox_to_anchor=(0.5, -0.15), fancybox=True, shadow=True, ncol=2)
        plt.savefig('./LSTMresults/' + title + '.png')
        plt.clf()
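
For context, the handler is driven from LSTM_plot_all.py later in this commit. A minimal usage sketch, assuming the LSTM_1.2A architecture below has already been saved to ./LSTMmodels and that Reader.getDataset(3) returns the binary Dataset3 split (the dataset id and epoch count here are illustrative):

# Minimal usage sketch (mirrors LSTM_plot_all.py below; not part of this file).
from reader import Reader
import LSTMHandler

(x_train, y_train), (x_test, y_test) = Reader.getDataset(3)   # Dataset3, 2 classes
handler = LSTMHandler.LSTMHandler('LSTM_1.2A', num_categories=2,
                                  loss='binary_crossentropy', optimizer='rmsprop')
(res_loss, res_acc, res_prec, res_rec, res_f1) = handler.fit_and_eval(
    x_train, y_train, x_test, y_test, nb_epoch=10, dataset_name='Dataset3')
LSTMHandler.LSTMHandler.plot_results('Dataset3.LSTM_1.2A', 'fscore',
                                     {'binary_crossentropy|rmsprop': res_f1})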
26 changes: 26 additions & 0 deletions LSTM_1.2A.py
@@ -0,0 +1,26 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, LSTM
from keras.utils.vis_utils import plot_model
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

#EXPECTS INPUT AS (nb_sample, timesteps, nb_features), where nb_sample=1 (batch_size = 1), timesteps = 1 and nb_features = length

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(LSTM(units=50, input_shape=(1,length), batch_input_shape=(1,1,length), recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', stateful=True))
model.add(Dense(1, kernel_initializer='random_uniform', activation = 'sigmoid'))


model.summary()
plot_model(model, to_file='./LSTMmodels/LSTM_1.2A.png', show_shapes=True)
fp = open('./LSTMmodels/LSTM_1.2A.json', 'w')
fp.write(model.to_json())
fp.close()
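
As the comment above notes, the layer is built stateful with batch_input_shape=(1, 1, length), so callers must present one timestep per sample and a batch size of 1. A minimal sketch of the reshaping that LSTMHandler.fit_and_eval applies before training; the sample count is illustrative, and the feature length of 79 comes from the saved model configs in this commit:

# Illustrative reshape only: n samples with `length` features each.
import numpy as np
n, length = 1000, 79
x = np.random.rand(n, length)
x = x.reshape(n, 1, length)   # -> (nb_samples, timesteps=1, nb_features)
# model.fit(x, y, batch_size=1, epochs=1, shuffle=False), then model.reset_states(),
# one epoch at a time, as fit_and_eval does for these stateful layers.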

4 changes: 2 additions & 2 deletions LSTM_1A.py
@@ -14,8 +14,8 @@
#EXPECTS INPUT AS (nb_sample, timesteps, nb_features), where nb_sample=1 (batch_size = 1), timesteps = 1 and nb_features = length

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
-model.add(LSTM(units=50, input_shape=(1,length), batch_input_shape=(1,1,length), recurrent_initializer='uniform', kernel_initializer='uniform', activation='relu', stateful=True))
-model.add(Dense(1, kernel_initializer='uniform', activation = 'sigmoid'))
+model.add(LSTM(units=50, input_shape=(1,length), batch_input_shape=(1,1,length), recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='relu', stateful=True))
+model.add(Dense(1, kernel_initializer='random_uniform', activation = 'sigmoid'))


model.summary()
27 changes: 27 additions & 0 deletions LSTM_2.2A.py
@@ -0,0 +1,27 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, LSTM
from keras.utils.vis_utils import plot_model
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

#EXPECTS INPUT AS (nb_sample, timesteps, nb_features), where nb_sample=1 (batch_size = 1), timesteps = 1 and nb_features = length

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(LSTM(units=50, input_shape=(1,length), batch_input_shape=(1,1,length), recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', return_sequences=True, stateful=True))
model.add(LSTM(units=50, recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', stateful=True))
model.add(Dense(1, kernel_initializer='random_uniform', activation = 'sigmoid'))


model.summary()
plot_model(model, to_file='./LSTMmodels/LSTM_2.2A.png', show_shapes=True)
fp = open('./LSTMmodels/LSTM_2.2A.json', 'w')
fp.write(model.to_json())
fp.close()

28 changes: 28 additions & 0 deletions LSTM_3.2A.py
@@ -0,0 +1,28 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, LSTM
from keras.utils.vis_utils import plot_model
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

#EXPECTS INPUT AS (nb_sample, timesteps, nb_features), where nb_sample=1 (batch_size = 1), timesteps = 1 and nb_features = length

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(LSTM(units=50, input_shape=(1,length), batch_input_shape=(1,1,length), recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', return_sequences=True, stateful=True))
model.add(LSTM(units=40, recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', stateful=True, return_sequences=True))
model.add(LSTM(units=30, recurrent_initializer='random_uniform', kernel_initializer='random_uniform', activation='sigmoid', stateful=True))
model.add(Dense(1, kernel_initializer='random_uniform', activation = 'sigmoid'))


model.summary()
plot_model(model, to_file='./LSTMmodels/LSTM_3.2A.png', show_shapes=True)
fp = open('./LSTMmodels/LSTM_3.2A.json', 'w')
fp.write(model.to_json())
fp.close()
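
The three architectures in this commit differ only in depth: every LSTM layer except the last sets return_sequences=True so the next LSTM still receives a 3-D (batch, timesteps, features) input, and only the final layer passes its last output to the Dense sigmoid head. A hedged sketch of the same pattern written as a loop; the build_stacked helper and its unit sizes are illustrative, not part of this commit:

# Hypothetical helper that reproduces the stacking pattern of LSTM_1.2A/2.2A/3.2A.
from keras.models import Sequential
from keras.layers import Dense, LSTM

def build_stacked(length, layer_units):
    model = Sequential()
    for i, units in enumerate(layer_units):
        is_last = (i == len(layer_units) - 1)
        kwargs = dict(units=units, recurrent_initializer='random_uniform',
                      kernel_initializer='random_uniform', activation='sigmoid',
                      stateful=True, return_sequences=not is_last)
        if i == 0:
            kwargs['batch_input_shape'] = (1, 1, length)  # stateful layers need a fixed batch shape
        model.add(LSTM(**kwargs))
    model.add(Dense(1, kernel_initializer='random_uniform', activation='sigmoid'))
    return model

# build_stacked(length, [50, 40, 30]) mirrors LSTM_3.2A above.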

87 changes: 87 additions & 0 deletions LSTM_plot_all.py
@@ -0,0 +1,87 @@
import keras.utils
from keras.models import Sequential, model_from_json, load_model
from keras.layers import Dense, Activation, SimpleRNN, LSTM
from keras.utils.vis_utils import plot_model
import keras.utils.np_utils
from keras.utils.np_utils import to_categorical

import os
import numpy as np
import datetime
import random
from reader import Reader
from sklearn import metrics
import matplotlib.pyplot as plt
import LSTMHandler


#(1, 'Dataset1', 2, 'RNN_1A')
#(2, 'Dataset2', 2, 'RNN_1A')
#(3, 'Dataset3', 2, 'RNN_1A')
#(4, 'Dataset4', 2, 'RNN_1A')
#(5, 'Dataset5', 3, 'RNN_1B')
#(6, 'Dataset0', 5, 'RNN_1C')
dataset_id = 3
dataset_name = 'Dataset3'
num_classes = 2
#RNN_name = 'RNN_1A'

num_epochs = 10

(x_train, y_train), (x_test, y_test) = Reader.getDataset(dataset_id)
#x_train = x_train[0:1000,:]
#y_train = y_train[0:1000]
#x_test = x_test[0:1000,:]
#y_test = y_test[0:1000]

for LSTM_name in ['LSTM_1.2A', 'LSTM_2.2A', 'LSTM_3.2A']:
    results = {}
    for loss, optimizer in [('binary_crossentropy', 'rmsprop')]:  #categorical_crossentropy
        #for optimizer in ['sgd', 'rmsprop']:
        LSTMmodel = LSTMHandler.LSTMHandler(LSTM_name, num_classes, loss, optimizer)
        (res_loss, res_accuracy, res_precision, res_recall, res_fscore) = LSTMmodel.fit_and_eval(x_train, y_train, x_test, y_test, num_epochs, dataset_name)
        if num_classes == 2:
            results[loss + '|' + optimizer] = res_fscore
        else:
            results[loss + '|' + optimizer] = res_accuracy

    title = dataset_name + '.' + LSTM_name
    metric = 'accuracy'
    if num_classes == 2:
        metric = 'fscore'
    LSTMHandler.LSTMHandler.plot_results(title, metric, results)



#(6, 'Dataset0', 5, 'RNN_1C') +categorical
#dataset_id = 6
#dataset_name = 'Dataset0'
#num_classes = 5
#RNN_name = 'RNN_1C'
#
#num_epochs = 10
##
#fp_logfile = open('./debug/logfile', "a")
#reader = Reader(fp_logfile, False)
#(x_train, y_train), (x_test, y_test) = reader.getDataNormalized()
#x_train = x_train[0:1000,:]
#y_train = y_train[0:1000]
#x_test = x_test[0:1000,:]
#y_test = y_test[0:1000]
#results = {}
#for loss in ['mse', 'categorical_crossentropy']:
#for optimizer in ['sgd','adagrad', 'rmsprop']:
# RNNmodel = RNNHandler.RNNHandler(RNN_name, num_classes, loss, optimizer)
# #(res_loss, res_accuracy, res_precision, res_recall, res_fscore) = RNNmodel.fit_and_eval(x_train, y_train, x_test, y_test, num_epochs, #dataset_name)
#if num_classes == 2:
# results[loss + '|' + optimizer] = res_fscore
#else:
# results[loss + '|' + optimizer] = res_accuracy
#
#title = dataset_name + '.' + RNN_name
#metric = 'accuracy'
#if num_classes == 2:
#metric = 'fscore'
#RNNHandler.RNNHandler.plot_results(title, metric, results)
#
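
If the commented-out sweep above were revived for the LSTM handler, it would look roughly like the sketch below; the loss/optimizer combinations are illustrative and assume a binary dataset, since binary_crossentropy would not apply otherwise:

# Hypothetical wider sweep, mirroring the commented-out RNN code above.
results = {}
for loss in ['binary_crossentropy', 'mse']:
    for optimizer in ['sgd', 'adagrad', 'rmsprop']:
        handler = LSTMHandler.LSTMHandler('LSTM_1.2A', num_classes, loss, optimizer)
        (_, acc, _, _, f1) = handler.fit_and_eval(x_train, y_train, x_test, y_test,
                                                  num_epochs, dataset_name)
        results[loss + '|' + optimizer] = f1 if num_classes == 2 else acc
metric = 'fscore' if num_classes == 2 else 'accuracy'
LSTMHandler.LSTMHandler.plot_results(dataset_name + '.LSTM_1.2A', metric, results)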
1 change: 1 addition & 0 deletions LSTMmodels/LSTM_1.2A.json
@@ -0,0 +1 @@
{"class_name": "Sequential", "config": [{"class_name": "LSTM", "config": {"name": "lstm_1", "recurrent_regularizer": null, "recurrent_initializer": {"class_name": "RandomUniform", "config": {"minval": -0.05, "seed": null, "maxval": 0.05}}, "go_backwards": false, "kernel_constraint": null, "kernel_regularizer": null, "recurrent_activation": "hard_sigmoid", "trainable": true, "unroll": false, "batch_input_shape": [1, 1, 79], "return_sequences": false, "implementation": 0, "kernel_initializer": {"class_name": "RandomUniform", "config": {"minval": -0.05, "seed": null, "maxval": 0.05}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "activation": "sigmoid", "recurrent_dropout": 0.0, "stateful": true, "unit_forget_bias": true, "dtype": "float32", "activity_regularizer": null, "bias_regularizer": null, "dropout": 0.0, "use_bias": true, "recurrent_constraint": null, "bias_constraint": null, "units": 50}}, {"class_name": "Dense", "config": {"name": "dense_1", "kernel_initializer": {"class_name": "RandomUniform", "config": {"minval": -0.05, "seed": null, "maxval": 0.05}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "activation": "sigmoid", "use_bias": true, "activity_regularizer": null, "kernel_regularizer": null, "bias_regularizer": null, "trainable": true, "kernel_constraint": null, "units": 1, "bias_constraint": null}}], "keras_version": "2.0.3", "backend": "theano"}
Binary file added LSTMmodels/LSTM_1.2A.png
2 changes: 1 addition & 1 deletion LSTMmodels/LSTM_1A.json
@@ -1 +1 @@
{"config": [{"config": {"recurrent_regularizer": null, "batch_input_shape": [1, 1, 79], "implementation": 0, "recurrent_activation": "hard_sigmoid", "name": "lstm_1", "return_sequences": false, "use_bias": true, "activation": "relu", "go_backwards": false, "kernel_constraint": null, "trainable": true, "kernel_regularizer": null, "bias_initializer": {"config": {}, "class_name": "Zeros"}, "unroll": false, "dropout": 0.0, "kernel_initializer": {"config": {"minval": -0.05, "seed": null, "maxval": 0.05}, "class_name": "RandomUniform"}, "activity_regularizer": null, "recurrent_initializer": {"config": {"minval": -0.05, "seed": null, "maxval": 0.05}, "class_name": "RandomUniform"}, "units": 50, "stateful": true, "bias_constraint": null, "recurrent_constraint": null, "bias_regularizer": null, "unit_forget_bias": true, "dtype": "float32", "recurrent_dropout": 0.0}, "class_name": "LSTM"}, {"config": {"bias_initializer": {"config": {}, "class_name": "Zeros"}, "kernel_initializer": {"config": {"minval": -0.05, "seed": null, "maxval": 0.05}, "class_name": "RandomUniform"}, "activity_regularizer": null, "bias_constraint": null, "units": 1, "name": "dense_1", "use_bias": true, "kernel_constraint": null, "trainable": true, "bias_regularizer": null, "kernel_regularizer": null, "activation": "sigmoid"}, "class_name": "Dense"}], "keras_version": "2.0.3", "backend": "theano", "class_name": "Sequential"}
{"class_name": "Sequential", "config": [{"class_name": "LSTM", "config": {"recurrent_initializer": {"class_name": "RandomUniform", "config": {"seed": null, "maxval": 0.05, "minval": -0.05}}, "recurrent_constraint": null, "activity_regularizer": null, "unit_forget_bias": true, "kernel_constraint": null, "units": 50, "kernel_regularizer": null, "go_backwards": false, "implementation": 0, "bias_regularizer": null, "trainable": true, "unroll": false, "name": "lstm_1", "recurrent_regularizer": null, "dropout": 0.0, "activation": "relu", "return_sequences": false, "kernel_initializer": {"class_name": "RandomUniform", "config": {"seed": null, "maxval": 0.05, "minval": -0.05}}, "use_bias": true, "batch_input_shape": [1, 1, 79], "recurrent_dropout": 0.0, "recurrent_activation": "hard_sigmoid", "bias_constraint": null, "stateful": true, "dtype": "float32", "bias_initializer": {"class_name": "Zeros", "config": {}}}}, {"class_name": "Dense", "config": {"name": "dense_1", "activity_regularizer": null, "kernel_constraint": null, "use_bias": true, "units": 1, "kernel_regularizer": null, "activation": "sigmoid", "kernel_initializer": {"class_name": "RandomUniform", "config": {"seed": null, "maxval": 0.05, "minval": -0.05}}, "bias_regularizer": null, "trainable": true, "bias_constraint": null, "bias_initializer": {"class_name": "Zeros", "config": {}}}}], "backend": "theano", "keras_version": "2.0.3"}
1 change: 1 addition & 0 deletions LSTMmodels/LSTM_2.2A.json
@@ -0,0 +1 @@
{"keras_version": "2.0.3", "config": [{"class_name": "LSTM", "config": {"dtype": "float32", "kernel_regularizer": null, "unit_forget_bias": true, "bias_initializer": {"class_name": "Zeros", "config": {}}, "go_backwards": false, "unroll": false, "recurrent_constraint": null, "return_sequences": true, "trainable": true, "stateful": true, "name": "lstm_1", "kernel_constraint": null, "recurrent_initializer": {"class_name": "RandomUniform", "config": {"maxval": 0.05, "minval": -0.05, "seed": null}}, "recurrent_regularizer": null, "dropout": 0.0, "bias_constraint": null, "recurrent_dropout": 0.0, "batch_input_shape": [1, 1, 79], "activation": "sigmoid", "units": 50, "implementation": 0, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "kernel_initializer": {"class_name": "RandomUniform", "config": {"maxval": 0.05, "minval": -0.05, "seed": null}}, "recurrent_activation": "hard_sigmoid"}}, {"class_name": "LSTM", "config": {"kernel_regularizer": null, "unit_forget_bias": true, "bias_initializer": {"class_name": "Zeros", "config": {}}, "go_backwards": false, "unroll": false, "recurrent_constraint": null, "return_sequences": false, "trainable": true, "stateful": true, "name": "lstm_2", "kernel_constraint": null, "recurrent_initializer": {"class_name": "RandomUniform", "config": {"maxval": 0.05, "minval": -0.05, "seed": null}}, "recurrent_regularizer": null, "dropout": 0.0, "bias_constraint": null, "recurrent_dropout": 0.0, "activation": "sigmoid", "units": 50, "implementation": 0, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "kernel_initializer": {"class_name": "RandomUniform", "config": {"maxval": 0.05, "minval": -0.05, "seed": null}}, "recurrent_activation": "hard_sigmoid"}}, {"class_name": "Dense", "config": {"bias_constraint": null, "kernel_regularizer": null, "bias_initializer": {"class_name": "Zeros", "config": {}}, "activation": "sigmoid", "units": 1, "activity_regularizer": null, "use_bias": true, "bias_regularizer": null, "trainable": true, "kernel_initializer": {"class_name": "RandomUniform", "config": {"maxval": 0.05, "minval": -0.05, "seed": null}}, "kernel_constraint": null, "name": "dense_1"}}], "backend": "theano", "class_name": "Sequential"}
Binary file added LSTMmodels/LSTM_2.2A.png
