optimize_viewpoints.m
function [phi,J,iterations,s1,s2,phi_hist,J_hist,dJ_hist] = optimize_viewpoints(matching,precision,initialization,learning_rate)
%OPTIMIZE_VIEWPOINTS  Estimate viewpoint angles phi from an n-by-n matrix of
%   pairwise matching angles (radians) by finite-difference gradient descent
%   on a pairwise phase-consistency cost. 'precision' is the stopping
%   threshold on var(dJ); 'initialization' and 'learning_rate' are optional.
%------------------------------------------------------
% Gradient Descent
%------------------------------------------------------
if(exist('learning_rate','var'))
    alpha = learning_rate;          % gradient-descent step size
else
    alpha = .1;
end
delta = .1*pi/180;                  % finite-difference perturbation (0.1 degree)
J_hist = [];
dJ_hist = [];
phi_hist = [];
matching = wrapTo2Pi(matching);     % wrap measurements into [0, 2*pi)
n = size(matching,1);
if(exist('initialization','var'))
    phi = initialization;
else
    phi = matching(:,1);            % default initialization: first column of matching
end
A = exp(1i*matching);               % precompute the unit-modulus constants
iterations = 0;
while(1)
    iterations = iterations + 1;
    [J,s1,s2] = cost_function(phi,A);
    % Approximate the derivative by forward differences over a random batch
    % (here batch_size = n, so every coordinate is perturbed each iteration).
    batch_size = n;
    b = [];
    while(nnz(b) < batch_size)
        b(randi(n)) = 1;
    end
    fb = find(b);
    dJ = zeros(n,1);
    for ik = 1 : length(fb)
        k = fb(ik);
        d = zeros(n,1);
        d(k) = delta;               % perturb only coordinate k
        phi_ = phi + d;
        J_k = cost_function(phi_,A);
        dJ(k) = J_k - J;            % unscaled forward difference (not divided by delta)
    end
    % Gradient-descent update and bookkeeping
    phi = phi - alpha*dJ;
    J_hist(end+1) = J;
    phi_hist(:,end+1) = phi;
    dJ_hist(:,end+1) = dJ;
    fprintf('%g\n',var(dJ));
    if(var(dJ) < precision)         % stop once the finite differences flatten out
        break;
    end
    % if(numel(J_hist) > 5000)
    %     break
    % end
end

function [J,s1,s2] = cost_function(phi,A)
% Pairwise consistency cost: penalize the mismatch between the phase
% differences of phi and the measured pairwise constants stored in A.
[p1,p2] = meshgrid(phi,phi);
ephi2 = exp(1i*p1).*exp(-1i*p2);    % ephi2(i,j) = exp(1i*(phi(j)-phi(i)))
d = abs(ephi2 .* A' - 1).^2;        % squared residual for every pair
s1 = sum(d);                        % column sums of the residuals
s2 = sum(d,2);                      % row sums of the residuals
J = sum(s1);                        % total cost
n = size(d,1);
s1 = s1./n;                         % normalized per-viewpoint residuals
s2 = s2./n;
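
A minimal usage sketch, not part of the repository: it builds a synthetic matching matrix for four viewpoints under the assumed convention matching(j,i) = phi(j) - phi(i), which makes the cost zero at the true angles, and runs the optimizer with its default initialization and learning rate. The angle values and the precision threshold below are illustrative.

% Hypothetical ground-truth angles for four viewpoints (radians).
phi_true = [0; 0.4; 1.1; 2.0];
% Assumed convention: matching(j,i) = phi_true(j) - phi_true(i).
matching = wrapTo2Pi(phi_true - phi_true');
[phi,J,iterations] = optimize_viewpoints(matching, 1e-10);
fprintf('final cost %g after %d iterations\n', J, iterations);

Because the cost depends only on differences of phi, the angles are recovered only up to a global offset. Note that wrapTo2Pi is a Mapping Toolbox function.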