adalinesgd.py
import numpy as np
from numpy.random import seed


class AdalineSGD(object):
    """ADAptive LInear NEuron classifier.

    Parameters
    -----------
    eta : float
        Learning rate (between 0.0 and 1.0)
    n_iter : int
        Passes over the training dataset.

    Attributes
    -----------
    w_ : 1d-array
        Weights after fitting.
    cost_ : list
        Sum-of-squares cost value averaged over all
        training samples in each epoch.
    shuffle : bool (default: True)
        Shuffles training data every epoch if True
        to prevent cycles.
    random_state : int (default: None)
        Set random state for shuffling and
        initializing the weights.

    """
    def __init__(self, eta=0.01, n_iter=10, shuffle=True,
                 random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.w_initialized = False
        self.shuffle = shuffle
        if random_state:
            seed(random_state)
    def fit(self, X, y):
        """Fit training data.

        Parameters
        ------------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors, where n_samples is the
            number of samples and n_features is the number
            of features.
        y : array-like, shape = [n_samples]
            Target values.

        Returns
        -------
        self : object

        """
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for i in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            cost = []
            for xi, target in zip(X, y):
                cost.append(self._update_weights(xi, target))
            avg_cost = sum(cost) / len(y)
            self.cost_.append(avg_cost)
        return self
    def partial_fit(self, X, y):
        """Fit training data without reinitializing the weights."""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)
        return self

    def _shuffle(self, X, y):
        """Shuffle training data."""
        r = np.random.permutation(len(y))
        return X[r], y[r]
    def _initialize_weights(self, m):
        """Initialize weights to zeros."""
        self.w_ = np.zeros(1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        """Apply the Adaline learning rule to update the weights."""
        output = self.net_input(xi)
        error = (target - output)
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * (error ** 2)
        return cost

    def net_input(self, X):
        """Calculate net input."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute linear activation."""
        return self.net_input(X)

    def predict(self, X):
        """Return class label after the unit step."""
        return np.where(self.activation(X) >= 0.0, 1, -1)
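

# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative only, not part of the original class):
# it trains AdalineSGD on a small synthetic, linearly separable dataset and
# then demonstrates predict() and partial_fit(). The data and hyperparameters
# below are made up for the example; features are standardized because the
# gradient-based updates converge more reliably on scaled inputs.
if __name__ == '__main__':
    # Two Gaussian blobs labeled -1 and 1 (hypothetical example data).
    rng = np.random.RandomState(1)
    X = np.vstack((rng.normal(loc=-1.0, scale=0.5, size=(50, 2)),
                   rng.normal(loc=1.0, scale=0.5, size=(50, 2))))
    y = np.hstack((-np.ones(50), np.ones(50)))

    # Standardize features (zero mean, unit variance).
    X_std = (X - X.mean(axis=0)) / X.std(axis=0)

    ada = AdalineSGD(eta=0.01, n_iter=15, random_state=1)
    ada.fit(X_std, y)
    print('Average cost per epoch:', ada.cost_)
    print('Training accuracy: %.2f' % np.mean(ada.predict(X_std) == y))

    # Online learning: update the already fitted weights with a single
    # new sample without reinitializing them.
    ada.partial_fit(X_std[0, :], y[0])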