launcher_package1.py
# Package 1
# from https://github.com/mnielsen/neural-networks-and-deep-learning
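#
# This launcher trains a fully connected 784-30-10 network on MNIST with
# sigmoid activations and a cross-entropy cost, then plots per-epoch training
# and test accuracy. (`layer` and `mnist_loader` are assumed to be the local
# modules distributed with this package, not standard-library imports.)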
import layer
import mnist_loader
### Data Loading
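# As in the original repo's mnist_loader, load_data_wrapper returns the
# training, validation and test sets as lists of (image, label) pairs;
# `augmentation` appears to be a flag added in this package, left off here so
# the raw MNIST data is used unchanged.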
training_data, validation_data, test_data = mnist_loader.load_data_wrapper(augmentation=False)
### Parameters
n_epoch = 3
learning_rate = 0.5
batch_size = 50
### Network Architecture
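# 784 = 28x28 pixels per MNIST image; 10 output nodes, one per digit class.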
n_node_input = 784
n_node_hidden = 30
n_node_output = 10
net = layer.Network([n_node_input, n_node_hidden, n_node_output],
                    W_init='xavier',              # or 'normal'
                    b_init='zero',                # or 'normal'
                    cost=layer.CrossEntropyCost,  # or layer.QuadraticCost
                    act_fn=layer.Sigmoid          # or layer.Relu
                    )
### Training
# SGD
evaluation_cost, evaluation_accuracy, \
    training_cost, training_accuracy = \
    net.SGD(training_data, n_epoch, batch_size, learning_rate,
            lmbda=0.0,  # L2 regularization
            evaluation_data=test_data,
            monitor_evaluation_cost=False,
            monitor_evaluation_accuracy=True,
            monitor_training_cost=False,
            monitor_training_accuracy=True)
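# In the original repo's network2.py these four return values are per-epoch
# lists, populated only when the corresponding monitor_* flag is True; the two
# accuracy lists are what gets plotted below. (Assumed to hold for this
# package's `layer` module as well.)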
### Plot results
import matplotlib.pyplot as plt
import numpy as np

# Set the font before creating any text objects so the legend and tick labels
# pick it up; 'normal' is not a valid matplotlib font family, so use the
# generic 'sans-serif' instead.
font = {'family': 'sans-serif',
        'weight': 'bold',
        'size': 15}
plt.rc('font', **font)

idx = np.arange(1, n_epoch + 1)
plt.plot(idx, evaluation_accuracy, 'ro-', label='test acc.')
plt.plot(idx, training_accuracy, 'bo-', label='training acc.')
plt.legend(loc='upper center', shadow=True)
plt.xlabel('Epoch', fontsize=22)
plt.ylabel('Accuracy [%]', fontsize=22)
plt.grid(True)
plt.show()