-
Notifications
You must be signed in to change notification settings - Fork 5
/
Copy pathnorms.py
64 lines (51 loc) · 1.7 KB
/
norms.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
import tensorflow as tf
import numpy as np
import scipy.sparse.linalg;
from tensorflow.python.framework import function
# SGD: the update direction is just the raw gradient.
def Sgdnm(grad, wt):
    """Return the plain SGD direction: the gradient itself (wt is unused)."""
    return grad
def get_cgd(grad, wt, alpha, lamda, grad_type):
    """Conditional-gradient-style direction built from `grad` and `wt`.

    grad_type selects how the gradient is normalized into a direction st:
      3 -> grad scaled by its Frobenius norm
      4 -> rank-1 top-singular-vector outer product of grad

    Returns ((1 - alpha) / alpha) * (wt + lamda * st).

    Raises:
        ValueError: if grad_type is not 3 or 4.  (The original code left
        `st` unbound in that case, producing an UnboundLocalError at the
        return line instead of a clear error.)
    """
    if grad_type == 3:  # Frobenius-norm normalization
        st = grad / frobenius_norm(grad)
    elif grad_type == 4:  # top singular direction (rank-1)
        st = top_singular_vector(grad)
    else:
        raise ValueError("unsupported grad_type: %r (expected 3 or 4)" % (grad_type,))
    return ((1 - alpha) / alpha) * (wt + lamda * st)
def get_cgd_with_st(st, wt, alpha, lamda):
    """Same combination as get_cgd, but with the direction st precomputed."""
    scale = (1 - alpha) / alpha
    return scale * (wt + lamda * st)
# Frobenius norm of a tensor.
def frobenius_norm(M):
    """Return sqrt(sum of squared entries of M) as a scalar tensor."""
    squared_total = tf.reduce_sum(M ** 2)
    return squared_total ** 0.5
# 4-dim W: apply get_cgd slice-by-slice across the middle axis of the
# unfolded conv tensors, then fold the result back to W's shape.
def cal_grad_set(gv, alpha, lamda, grad_type):
    """Build a modified gradient for one (gradient, weight) pair.

    gv is a (G, W) tuple — presumably 4-D conv-layer tensors, since both
    are passed through unfold_conv_layer (TODO confirm against callers).
    For each index k along axis 1 of the unfolded tensors, get_cgd is
    applied to the (axis0, axis2) matrix slice; results are re-stacked
    and reshaped back to W's original shape.
    """
    (G, W) = gv
    g0 = unfold_conv_layer(G)
    w0 = unfold_conv_layer(W)
    sizes = tf.shape(w0)
    # Seed the accumulator with a dummy slice so tf.concat has something
    # to append to inside the loop; it is sliced off again after the loop.
    s = tf.ones(shape=[sizes[0], 1, sizes[2]])
    k = tf.constant(0)
    def body(k, s):
        # Process the k-th matrix slice and append it along axis 1.
        r_2 = get_cgd(g0[:, k, :], w0[:, k, :], alpha, lamda, grad_type)
        r_3 = tf.expand_dims(r_2, 1)
        s = tf.concat([s, r_3], axis=1)
        k = k + 1
        return k, s
    def condition(k,s):
        return k < sizes[1]
    # shape_invariants: s grows along axis 1 each iteration, so its shape
    # must be declared fully dynamic for the graph-mode while_loop.
    _, s = tf.while_loop(cond=condition, body=body, loop_vars=[k, s], shape_invariants=[k.get_shape(), tf.TensorShape([None, None, None])])
    # Drop the dummy seed slice inserted before the loop.
    s = s[:, 1:, :]
    # s = tf.transpose(s, perm=[1, 0, 2])
    g_new = tf.reshape(s, shape=tf.shape(W))
    return g_new
# Unfold a conv-layer tensor by merging its first two dimensions.
def unfold_conv_layer(W, option=True):
    """Reshape 4-D W to 3-D by collapsing dims 0 and 1 into one axis.

    `option` is accepted for interface compatibility but is not used.
    """
    dims = tf.shape(W)
    merged = dims[0] * dims[1]
    return tf.reshape(W, shape=[merged, dims[2], dims[3]])
# SVD-based rank-1 direction.
def top_singular_vector(M):
    """Return u1 @ v1^T, the outer product of M's leading singular vectors.

    Note: the singular values themselves are discarded; only the top
    left/right singular vectors contribute to the result.
    """
    _, u, v = tf.svd(M, full_matrices=False)
    rows_cols = tf.shape(M)
    u1 = tf.reshape(u[:, 0], [rows_cols[0], 1])
    v1 = tf.reshape(v[:, 0], [rows_cols[1], 1])
    return tf.matmul(u1, tf.transpose(v1))