
Commit 7595787

Added code for creating, training and evaluating RNNs.
OrestisAlpos committed Apr 13, 2017
1 parent 329e2ad commit 7595787
Showing 31 changed files with 1,329 additions and 0 deletions.
126 changes: 126 additions & 0 deletions RNNHandler.py
@@ -0,0 +1,126 @@
import keras.utils
from keras.models import Sequential, model_from_json, load_model
from keras.layers import Dense, Activation, SimpleRNN
from keras.utils.visualize_util import plot
import keras.utils.np_utils
from keras.utils.np_utils import to_categorical

import os
import numpy as np
import datetime
import random
from reader import Reader
from sklearn import metrics
import matplotlib.pyplot as plt

def MyMetrics(y_true, y_pred):
    # Threshold the sigmoid outputs at 0.5 to obtain hard labels.
    y_pred[y_pred < 0.5] = 0
    y_pred[y_pred >= 0.5] = 1
    # If either vector contains only one class, flip a single element so that
    # the confusion matrix is always 2x2 and the metrics below stay defined.
    if np.count_nonzero(y_pred == 1) == y_pred.shape[0]:
        y_pred[0] = 0
    if np.count_nonzero(y_pred == 0) == y_pred.shape[0]:
        y_pred[0] = 1
    if np.count_nonzero(y_true == 1) == y_true.shape[0]:
        y_true[0] = 0
    if np.count_nonzero(y_true == 0) == y_true.shape[0]:
        y_true[0] = 1
    confusion = metrics.confusion_matrix(y_true, y_pred)
    TP = confusion[1, 1]
    TN = confusion[0, 0]
    FP = confusion[0, 1]
    FN = confusion[1, 0]
    precision = TP / (TP + FP)
    recall = TP / (TP + FN)
    fscore = 2 * precision * recall / (precision + recall)
    return (precision, recall, fscore)


class RNNHandler:

    results_directory = '/home/orestis/net/RNNresults'
    models_directory = '/home/orestis/net/RNNmodels'

    def __init__(self, model_name, num_categories, loss, optimizer):
        # Load the architecture from its JSON description and compile it.
        fp_model = open(os.path.join(self.models_directory, model_name + '.json'), 'r')
        model_str = fp_model.read()
        self.model = model_from_json(model_str)
        self.model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
        fp_model.close()

        self.model_name = model_name
        self.num_categories = num_categories
        self.loss = loss
        self.optimizer = optimizer

    def fit_and_eval(self, x_train, y_train, x_test, y_test, nb_epoch, dataset_name):  # batch_size is always 1 and shuffle is always False, so we don't pass them as parameters
        self.results_file = os.path.join(self.results_directory, self.model_name + '.' + dataset_name)
        self.write_result('Model:' + self.model_name + ' Dataset:' + dataset_name + ' Loss:' + self.loss + ' Optimizer:' + self.optimizer + ' Dropout:No')
        self.write_result('Epoch|Loss|Accuracy|Precision|Recall|Fscore')
        res_loss = []
        res_accuracy = []
        res_precision = []
        res_recall = []
        res_fscore = []
        # The stateful RNN expects input of shape (nb_samples, timesteps, nb_features) with timesteps = 1.
        x_train = x_train.reshape(x_train.shape[0], 1, -1)
        x_test = x_test.reshape(x_test.shape[0], 1, -1)
        if self.num_categories > 2:
            y_train = to_categorical(y_train, self.num_categories)
            y_test = to_categorical(y_test, self.num_categories)
        for i in range(1, nb_epoch + 1):
            # Train one epoch at a time so that the metrics can be recorded per epoch.
            self.model.fit(x_train, y_train, batch_size=1, nb_epoch=1, shuffle=False)
            self.model.reset_states()
            (loss, accuracy) = self.model.evaluate(x_test, y_test, batch_size=1)
            self.model.reset_states()
            res_loss.append(loss)
            res_accuracy.append(accuracy)
            (precision, recall, fscore) = (0, 0, 0)
            if self.num_categories == 2:
                y_pred = self.model.predict(x_test, batch_size=1)
                self.model.reset_states()
                (precision, recall, fscore) = MyMetrics(y_test, y_pred)
            res_precision.append(precision)
            res_recall.append(recall)
            res_fscore.append(fscore)
            self.write_result(str(i) + '|' + str(loss) + '|' + str(accuracy) + '|' + str(precision) + '|' + str(recall) + '|' + str(fscore))

        return (res_loss, res_accuracy, res_precision, res_recall, res_fscore)


    def write_result(self, text):
        fp = open(self.results_file, 'a')
        fp.write(text + '\n')
        fp.close()

    # def save_weights():



    @staticmethod
    def plot_results(title, metric, results):
        # results maps 'loss|optimizer' keys to per-epoch series of the chosen metric.
        lns = []
        for k in results.keys():
            result = results[k]
            myplot = plt.subplot()
            myplot.grid(True)
            myplot.set_xlabel("Epoch Number")
            myplot.set_ylabel(metric)
            x_Axis = np.arange(1, len(result) + 1)
            #myplot.xaxis.set_ticks(x_Axis)  #np.arange( 1, len(x_Axis)+1, 1))
            #myplot.set_xticklabels(x_Axis, rotation=0)
            tokens = k.split('|')
            loss = tokens[0]
            if loss == 'categorical_crossentropy' or loss == 'binary_crossentropy':
                loss = 'crossentropy'
            optimizer = tokens[1]
            line = myplot.plot(x_Axis, result, label='loss:' + loss + ' opt:' + optimizer)
            lns = lns + line
        # Shrink the axes and place a shared legend below the plot.
        box = myplot.get_position()
        myplot.set_position([box.x0, box.y0 + box.height * 0.30, box.width, box.height * 0.70])
        labs = [l.get_label() for l in lns]
        plt.title(title)
        lgd = plt.legend(lns, labs, loc='upper center', bbox_to_anchor=(0.5, -0.12), fancybox=True, shadow=True, ncol=2)
        plt.savefig('/home/orestis/net/RNNresults/' + title + '.png')
        plt.clf()
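
For reference, a minimal sketch of what MyMetrics computes on a toy prediction vector (not part of the commit; the arrays and values are hypothetical, and Python 3 true division is assumed):

import numpy as np
from sklearn import metrics

y_true = np.array([1, 0, 1, 1, 0])            # hypothetical labels
y_pred = np.array([0.9, 0.2, 0.4, 0.8, 0.6])  # hypothetical sigmoid outputs
y_pred = (y_pred >= 0.5).astype(int)          # thresholding as in MyMetrics -> [1, 0, 0, 1, 1]
confusion = metrics.confusion_matrix(y_true, y_pred)
TP, TN = confusion[1, 1], confusion[0, 0]     # TP = 2, TN = 1
FP, FN = confusion[0, 1], confusion[1, 0]     # FP = 1, FN = 1
precision = TP / (TP + FP)                    # 2/3
recall = TP / (TP + FN)                       # 2/3
fscore = 2 * precision * recall / (precision + recall)  # 2/3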
26 changes: 26 additions & 0 deletions RNN_1A.py
@@ -0,0 +1,26 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, SimpleRNN
from keras.utils.visualize_util import plot
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

# The stateful RNN expects input of shape (nb_samples, timesteps, nb_features), with nb_samples = 1 (batch_size = 1), timesteps = 1 and nb_features = length.

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(SimpleRNN(output_dim=50, input_shape=(1,length), batch_input_shape=(1,1,length), init='uniform', inner_init='uniform', activation='relu', stateful=True))
model.add(Dense(1, init='uniform', activation = 'sigmoid'))


model.summary()
plot(model, to_file='/home/orestis/net/RNNmodels/RNN_1A.png')
fp = open('/home/orestis/net/RNNmodels/RNN_1A.json', 'w')
fp.write(model.to_json())
fp.close()

26 changes: 26 additions & 0 deletions RNN_1B.py
@@ -0,0 +1,26 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, SimpleRNN
from keras.utils.visualize_util import plot
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

# The stateful RNN expects input of shape (nb_samples, timesteps, nb_features), with nb_samples = 1 (batch_size = 1), timesteps = 1 and nb_features = length.

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(SimpleRNN(output_dim=50, input_shape=(1,length), batch_input_shape=(1,1,length), init='uniform', inner_init='uniform', activation='relu', stateful=True))
model.add(Dense(3, init='uniform', activation = 'softmax'))


model.summary()
plot(model, to_file='/home/orestis/net/RNNmodels/RNN_1B.png')
fp = open('/home/orestis/net/RNNmodels/RNN_1B.json', 'w')
fp.write(model.to_json())
fp.close()

26 changes: 26 additions & 0 deletions RNN_1C.py
@@ -0,0 +1,26 @@

from keras.models import Sequential
from keras.layers import Dense, Activation, SimpleRNN
from keras.utils.visualize_util import plot
import os
import numpy as np
from reader import Reader


length = Reader.getInputShape()

model = Sequential()

# The stateful RNN expects input of shape (nb_samples, timesteps, nb_features), with nb_samples = 1 (batch_size = 1), timesteps = 1 and nb_features = length.

#model.add(Dense(40, input_dim = 12, init='uniform', activation='relu'))
model.add(SimpleRNN(output_dim=50, input_shape=(1,length), batch_input_shape=(1,1,length), init='uniform', inner_init='uniform', activation='relu', stateful=True))
model.add(Dense(5, init='uniform', activation = 'softmax'))


model.summary()
plot(model, to_file='/home/orestis/net/RNNmodels/RNN_1C.png')
fp = open('/home/orestis/net/RNNmodels/RNN_1C.json', 'w')
fp.write(model.to_json())
fp.close()

85 changes: 85 additions & 0 deletions RNN_plot_all.py
@@ -0,0 +1,85 @@
import keras.utils
from keras.models import Sequential, model_from_json, load_model
from keras.layers import Dense, Activation, SimpleRNN
from keras.utils.visualize_util import plot
import keras.utils.np_utils
from keras.utils.np_utils import to_categorical

import os
import numpy as np
import datetime
import random
from reader import Reader
from sklearn import metrics
import matplotlib.pyplot as plt
import RNNHandler


#(1, 'Dataset1', 2, 'RNN_1A')
#(2, 'Dataset2', 2, 'RNN_1A')
#(3, 'Dataset3', 2, 'RNN_1A') !! graphical
#(4, 'Dataset4', 2, 'RNN_1A') !!! mixed-type: binary and continuous features
#(5, 'Dataset5', 3, 'RNN_1B') +categorical
dataset_id = 5
dataset_name = 'Dataset5'
num_classes = 3
RNN_name = 'RNN_1B'

num_epochs = 10

(x_train, y_train), (x_test, y_test) = Reader.getDataset(dataset_id)
x_train = x_train[0:1000,:]
y_train = y_train[0:1000]
x_test = x_test[0:1000,:]
y_test = y_test[0:1000]

results = {}
for loss in ['mae', 'mse', 'categorical_crossentropy']:
    for optimizer in ['sgd', 'adagrad', 'rmsprop']:
        RNNmodel = RNNHandler.RNNHandler(RNN_name, num_classes, loss, optimizer)
        (res_loss, res_accuracy, res_precision, res_recall, res_fscore) = RNNmodel.fit_and_eval(x_train, y_train, x_test, y_test, num_epochs, dataset_name)
        # Track the per-epoch F-score for binary problems, accuracy otherwise.
        if num_classes == 2:
            results[loss + '|' + optimizer] = res_fscore
        else:
            results[loss + '|' + optimizer] = res_accuracy

title = 'Dataset ' + dataset_name + ', ' + 'Model ' + RNN_name
metric = 'accuracy'
if num_classes == 2:
    metric = 'fscore'
RNNHandler.RNNHandler.plot_results(title, metric, results)



#(6, 'Dataset0', 5, 'RNN_1C') +categorical
dataset_id = 6
dataset_name = 'Dataset0'
num_classes = 5
RNN_name = 'RNN_1C'

num_epochs = 10

fp_logfile = open('/home/orestis/net/debug/logfile', "a")
reader = Reader(fp_logfile, False)
(x_train, y_train), (x_test, y_test) = reader.getDataNormalized()
x_train = x_train[0:1000,:]
y_train = y_train[0:1000]
x_test = x_test[0:1000,:]
y_test = y_test[0:1000]
results = {}
for loss in ['mae', 'mse', 'categorical_crossentropy']:
    for optimizer in ['sgd', 'adagrad', 'rmsprop']:
        RNNmodel = RNNHandler.RNNHandler(RNN_name, num_classes, loss, optimizer)
        (res_loss, res_accuracy, res_precision, res_recall, res_fscore) = RNNmodel.fit_and_eval(x_train, y_train, x_test, y_test, num_epochs, dataset_name)
        if num_classes == 2:
            results[loss + '|' + optimizer] = res_fscore
        else:
            results[loss + '|' + optimizer] = res_accuracy

title = 'Dataset: ' + dataset_name + ', ' + 'Model: ' + RNN_name
metric = 'accuracy'
if num_classes == 2:
    metric = 'fscore'
RNNHandler.RNNHandler.plot_results(title, metric, results)
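
For clarity, the shape of the results dictionary that both loops build and plot_results consumes (a sketch, not part of the commit; the numbers are made up):

results = {
    'mse|sgd': [0.61, 0.65, 0.70],                           # one value per epoch
    'categorical_crossentropy|rmsprop': [0.72, 0.78, 0.81],
}
# RNNHandler.RNNHandler.plot_results('Dataset: Dataset5, Model: RNN_1B', 'accuracy', results)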

1 change: 1 addition & 0 deletions RNNmodels/RNN_1.json
@@ -0,0 +1 @@
{"class_name": "Sequential", "config": [{"class_name": "SimpleRNN", "config": {"b_regularizer": null, "U_regularizer": null, "W_regularizer": null, "activation": "relu", "go_backwards": false, "init": "uniform", "inner_init": "uniform", "dropout_U": 0.0, "input_dtype": "float32", "batch_input_shape": [1, 1, 79], "name": "simplernn_1", "return_sequences": false, "dropout_W": 0.0, "unroll": false, "trainable": true, "stateful": true, "consume_less": "cpu", "output_dim": 50}}, {"class_name": "Dense", "config": {"b_regularizer": null, "W_regularizer": null, "activation": "softmax", "activity_regularizer": null, "b_constraint": null, "init": "uniform", "bias": true, "W_constraint": null, "input_dim": null, "output_dim": 3, "trainable": true, "name": "dense_1"}}], "keras_version": "1.1.1"}
Binary file added RNNmodels/RNN_1.png
1 change: 1 addition & 0 deletions RNNmodels/RNN_1A.json
@@ -0,0 +1 @@
{"keras_version": "1.1.1", "config": [{"config": {"dropout_U": 0.0, "name": "simplernn_1", "stateful": true, "input_dtype": "float32", "unroll": false, "U_regularizer": null, "init": "uniform", "inner_init": "uniform", "dropout_W": 0.0, "output_dim": 50, "batch_input_shape": [1, 1, 79], "return_sequences": false, "trainable": true, "W_regularizer": null, "go_backwards": false, "b_regularizer": null, "consume_less": "cpu", "activation": "relu"}, "class_name": "SimpleRNN"}, {"config": {"name": "dense_1", "activity_regularizer": null, "trainable": true, "output_dim": 1, "b_constraint": null, "bias": true, "activation": "sigmoid", "input_dim": null, "init": "uniform", "W_regularizer": null, "W_constraint": null, "b_regularizer": null}, "class_name": "Dense"}], "class_name": "Sequential"}
Binary file added RNNmodels/RNN_1A.png
1 change: 1 addition & 0 deletions RNNmodels/RNN_1B.json
@@ -0,0 +1 @@
{"class_name": "Sequential", "keras_version": "1.1.1", "config": [{"class_name": "SimpleRNN", "config": {"consume_less": "cpu", "inner_init": "uniform", "return_sequences": false, "output_dim": 50, "W_regularizer": null, "go_backwards": false, "batch_input_shape": [1, 1, 79], "trainable": true, "b_regularizer": null, "stateful": true, "unroll": false, "U_regularizer": null, "activation": "relu", "input_dtype": "float32", "name": "simplernn_1", "dropout_U": 0.0, "dropout_W": 0.0, "init": "uniform"}}, {"class_name": "Dense", "config": {"activity_regularizer": null, "output_dim": 3, "W_regularizer": null, "W_constraint": null, "b_constraint": null, "activation": "softmax", "trainable": true, "b_regularizer": null, "bias": true, "input_dim": null, "name": "dense_1", "init": "uniform"}}]}
Binary file added RNNmodels/RNN_1B.png
1 change: 1 addition & 0 deletions RNNmodels/RNN_1C.json
@@ -0,0 +1 @@
{"keras_version": "1.1.1", "class_name": "Sequential", "config": [{"class_name": "SimpleRNN", "config": {"trainable": true, "go_backwards": false, "input_dtype": "float32", "activation": "relu", "unroll": false, "U_regularizer": null, "init": "uniform", "b_regularizer": null, "batch_input_shape": [1, 1, 79], "name": "simplernn_1", "consume_less": "cpu", "inner_init": "uniform", "stateful": true, "output_dim": 50, "dropout_U": 0.0, "return_sequences": false, "dropout_W": 0.0, "W_regularizer": null}}, {"class_name": "Dense", "config": {"trainable": true, "input_dim": null, "activation": "softmax", "W_constraint": null, "activity_regularizer": null, "b_constraint": null, "init": "uniform", "b_regularizer": null, "name": "dense_1", "output_dim": 5, "bias": true, "W_regularizer": null}}]}
Binary file added RNNmodels/RNN_1C.png
Binary file added RNNresults/Dataset Dataset0, Model RNN_1C.png
Binary file added RNNresults/Dataset Dataset1, RNN RNN_1A.png
Binary file added RNNresults/Dataset Dataset2, RNN RNN_1A.png
Binary file added RNNresults/Dataset Dataset3, RNN RNN_1A.png
Binary file added RNNresults/Dataset: Dataset5, Model RNN_1B.png
45 changes: 45 additions & 0 deletions RNNresults/OLD/RNNresultsDataset2
@@ -0,0 +1,45 @@
RNN:: Loss Function:binary_crossentropy, Optimizer:adagrad, Batch Size:1, Dropout Rate:No
Loss Function: binary_crossentropy Epoch: 0 Prec: 0.995253584705 Rec: 0.9995 F: 0.997372272485
Loss Function: binary_crossentropy Epoch: 1 Prec: 0.995452181514 Rec: 0.999577777778 F: 0.99751071391
Loss Function: binary_crossentropy Epoch: 2 Prec: 0.9956066089 Rec: 0.999622222222 F: 0.997610374633
Loss Function: binary_crossentropy Epoch: 3 Prec: 0.995760976636 Rec: 0.999644444444 F: 0.997698931528
Loss Function: binary_crossentropy Epoch: 4 Prec: 0.995838175881 Rec: 0.999655555556 F: 0.997743214395
Loss Function: binary_crossentropy Epoch: 5 Prec: 0.995926454798 Rec: 0.999677777778 F: 0.997798590432
Loss Function: binary_crossentropy Epoch: 6 Prec: 0.99594877301 Rec: 0.999744444444 F: 0.997842999174
Loss Function: binary_crossentropy Epoch: 7 Prec: 0.99601487779 Rec: 0.999733333333 F: 0.997870641469
Loss Function: binary_crossentropy Epoch: 8 Prec: 0.9960700962 Rec: 0.999755555556 F: 0.997909423123
Loss Function: binary_crossentropy Epoch: 9 Prec: 0.99611420474 Rec: 0.999755555556 F: 0.997931558429
Loss Function: binary_crossentropy Epoch: 10 Prec: 0.996136388797 Rec: 0.999788888889 F: 0.997959296845
Loss Function: binary_crossentropy Epoch: 11 Prec: 0.996191530584 Rec: 0.999788888889 F: 0.997986967974
Loss Function: binary_crossentropy Epoch: 12 Prec: 0.99623564841 Rec: 0.999788888889 F: 0.998009105983
Loss Function: binary_crossentropy Epoch: 13 Prec: 0.996323936488 Rec: 0.9998 F: 0.998058941624
Loss Function: binary_crossentropy Epoch: 14 Prec: 0.996357073257 Rec: 0.999811111111 F: 0.998081103864
Loss Function: binary_crossentropy Epoch: 15 Prec: 0.996379058334 Rec: 0.999788888889 F: 0.998081061295
Loss Function: binary_crossentropy Epoch: 16 Prec: 0.996434226991 Rec: 0.999788888889 F: 0.998108739178
Loss Function: binary_crossentropy Epoch: 17 Prec: 0.996467331118 Rec: 0.999788888889 F: 0.998125346644
Loss Function: binary_crossentropy Epoch: 18 Prec: 0.996478288315 Rec: 0.999766666667 F: 0.998119769048
Loss Function: binary_crossentropy Epoch: 19 Prec: 0.996522432525 Rec: 0.999766666667 F: 0.998141913441
Loss Function: binary_crossentropy Epoch: 20 Prec: 0.99655554325 Rec: 0.999766666667 F: 0.998158522381
RNN:: Loss Function:mse, Optimizer:adagrad, Batch Size:1, Dropout Rate:No
Loss Function: mse Epoch: 0 Prec: 0.996677004874 Rec: 0.999777777778 F: 0.998224983359
Loss Function: mse Epoch: 1 Prec: 0.996732098547 Rec: 0.999744444444 F: 0.998235998935
Loss Function: mse Epoch: 2 Prec: 0.996765188107 Rec: 0.999733333333 F: 0.998247054386
Loss Function: mse Epoch: 3 Prec: 0.996798245128 Rec: 0.999711111111 F: 0.998252553214
Loss Function: mse Epoch: 4 Prec: 0.996809182464 Rec: 0.999677777778 F: 0.998241419291
Loss Function: mse Epoch: 5 Prec: 0.996809111759 Rec: 0.999655555556 F: 0.99823030451
Loss Function: mse Epoch: 6 Prec: 0.996819979834 Rec: 0.9996 F: 0.998208054324
Loss Function: mse Epoch: 7 Prec: 0.996842000288 Rec: 0.999577777778 F: 0.998208014558
Loss Function: mse Epoch: 8 Prec: 0.996864057444 Rec: 0.999566666667 F: 0.998213532767
Loss Function: mse Epoch: 9 Prec: 0.996886047053 Rec: 0.999533333333 F: 0.99820793502
Loss Function: mse Epoch: 10 Prec: 0.99689699119 Rec: 0.9995 F: 0.998196798624
Loss Function: mse Epoch: 11 Prec: 0.996885909015 Rec: 0.999488888889 F: 0.998185702
Loss Function: mse Epoch: 12 Prec: 0.996918916523 Rec: 0.999444444444 F: 0.998180083006
Loss Function: mse Epoch: 13 Prec: 0.996940777442 Rec: 0.999366666667 F: 0.998152248098
Loss Function: mse Epoch: 14 Prec: 0.9969406418 Rec: 0.999322222222 F: 0.998130011375
Loss Function: mse Epoch: 15 Prec: 0.996940540061 Rec: 0.999288888889 F: 0.998113333185
Loss Function: mse Epoch: 16 Prec: 0.996929318907 Rec: 0.999233333333 F: 0.998079996449
Loss Function: mse Epoch: 17 Prec: 0.996940336563 Rec: 0.999222222222 F: 0.99807997514
Loss Function: mse Epoch: 18 Prec: 0.996929216784 Rec: 0.9992 F: 0.998063316778
Loss Function: mse Epoch: 19 Prec: 0.996918062592 Rec: 0.999166666667 F: 0.998041098095
Loss Function: mse Epoch: 20 Prec: 0.996929046563 Rec: 0.999144444444 F: 0.998035516093

Empty file.