Commit 9b57077: edits with Chee

boris-kz committed Feb 12, 2025
1 parent 1740235 commit 9b57077
Showing 4 changed files with 63 additions and 43 deletions.
59 changes: 37 additions & 22 deletions frame_2D_alg/agg_recursion.py
@@ -5,7 +5,7 @@
from multiprocessing import Pool, Manager
from frame_blobs import frame_blobs_root, intra_blob_root, imread, aves, Caves
from comp_slice import comp_latuple, comp_vert
-from vect_edge import L2N, sum_H, add_H, comp_H, comp_N, comp_node_, comp_link_, sum2graph, get_rim, CG, vectorize_root, comp_area, extend_box, Val_
+from vect_edge import L2N, comb_H_, sum_H, add_H, comp_H, comp_N, comp_node_, comp_link_, sum2graph, get_rim, CG, vectorize_root, comp_area, extend_box, Val_
'''
notation:
prefix f: flag
@@ -42,25 +42,25 @@ def cross_comp(root):  # form agg_Level by breadth-first node_,link_ cross-comp,
      N_,L_,Et = comp_node_(root.node_[-1])  # cross-comp top-composition exemplars in root.node_
      # mfork
      if Val_(Et, _Et=Et, fd=0) > 0:  # cluster eval
-         derH = [[mlay] for mlay in sum_H(L_,root, fd=1)]  # nested mlay per layer
-         pL_ = {l for n in N_ for l,_ in get_rim(n,fd=0)}
+         derH = [[comb_H_(L_, root, fd=1)]]  # nested mlay
+         pL_ = {l for n in N_ for l,_ in get_rim(n, fd=0)}
          if len(pL_) > ave_L:
-             cluster_N_(root, pL_, fd=0)  # form multiple distance segments, same depth
-         # dfork, one for all distance segments, adds altGs, no higher Gs:
-         L2N(L_,root)
-         lN_,lL_,dEt = comp_link_(L_,Et)  # same root for L_, root.link_ was compared in root-forming for alt clustering
-         if Val_(dEt, _Et=Et, fd=1) > 0:
-             dderH = sum_H(lL_, root, fd=1)
-             for lay, dlay in zip(derH, dderH): lay += [dlay]
-             derH += [[[], dderH[-1]]]  # dderH is longer
-             plL_ = {l for n in lN_ for l,_ in get_rim(n,fd=1)}
-             if len(plL_) > ave_L:
-                 cluster_N_(root, plL_, fd=1)  # form altGs for cluster_C_, no new links between dist-seg Gs
-         else:
-             for lay in derH:
-                 lay += [[]]  # empty dlay
+             cluster_N_(root, pL_,fd=0)  # form multiple distance segments, same depth
+             if Val_(Et, _Et=Et, fd=0) > 0:
+                 # dfork, one for all distance segments, adds altGs, no higher Gs:
+                 L2N(L_,root)
+                 lN_,lL_,dEt = comp_link_(L_,Et)  # same root for L_, root.link_ was compared in root-forming for alt clustering
+                 if Val_(dEt, _Et=Et, fd=1) > 0:
+                     derH[0] += [comb_H_(lL_, root, fd=1)]  # += dlay
+                     plL_ = {l for n in lN_ for l,_ in get_rim(n,fd=1)}
+                     if len(plL_) > ave_L:
+                         cluster_N_(root, plL_, fd=1)  # form altGs for cluster_C_, no new links between dist-seg Gs
+                 else:
+                     derH[0] += [[]]  # empty dlay
+             else: derH[0] += [[]]  # empty dlay
          root.derH = derH  # replace lower derH, may not align to node_,link_H append in cluster_N_
          comb_altG_(top_(root))  # comb node contour: altG_ | neg links sum, cross-comp -> CG altG
          # agg eval +=derH,node_H:
          cluster_C_(root)  # -> mfork G,altG exemplars, +altG surround borrow, root.derH + 1|2 lays
          # no dfork cluster_C_, no ddfork
          # if val_: lev_G -> agg_H_seq
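The restructure above collapses derH from per-layer [mlay, dlay] pairs into a single combined layer per fork: comb_H_ folds all link layers into one CLay, and the dfork now appends into derH[0] instead of zipping across layers. A shape sketch, with strings as hypothetical stand-ins for CLay instances:

    derH = [['mlay']]        # mfork: one combined lay from comb_H_(L_, root, fd=1)
    derH[0] += ['dlay']      # if the dfork evals: derH == [['mlay', 'dlay']]
    # else: derH[0] += [[]]  # empty dlay keeps the m|d fork positions aligned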
Expand Down Expand Up @@ -271,6 +271,20 @@ def sort_H(H, fd): # re-assign olp and form priority indices for comp_tree, if
      if not fd:
          H.root.node_ = H.node_

+ def weigh_m_(m_, M, ave):  # adjust weights on attr matches, also add cost attrs
+
+     _w_ = [1 for _ in m_]
+
+     while True:
+         w_ = [min(m/M, M/m) for m in m_]  # rational deviations from mean,
+         # or balanced: min(m/M, M/m) + (m+M)/2?
+         Dw = sum([abs(w-_w) for w,_w in zip(w_,_w_)])  # weight update
+         M = sum(m*w for m, w in zip(m_,w_)) / sum(w_)  # M update
+         if Dw > ave:
+             _w_ = w_
+         else:
+             break
+     return w_, M
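A usage sketch for weigh_m_, assuming all matches are positive, since a zero m would divide by zero in min(m/M, M/m); the values are hypothetical:

    m_ = [.4, .6, .2, .9, .5, .3]           # hypothetical per-attr matches, all > 0
    w_, M = weigh_m_(m_, sum(m_), ave=.01)  # iterate until the total weight update Dw <= ave
    # w_: per-attr weights, 1 where m equals the running mean M, smaller elsewhere
    # M:  weighted mean of m_ under the converged weights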

  def agg_level(inputs):  # draft parallel

      frame, H, elevation = inputs
Expand Down Expand Up @@ -338,11 +352,12 @@ def agg_H_seq(focus): # sequential level-updating pipeline
      hG = lev_G; agg_H = agg_H[:-1]  # local top graph, gets no feedback
      while agg_H:
          lev_G = agg_H.pop()
-         dm_ = hG.vert[0] - lev_G.vert[0]  # need to add other aves?
-         if sum(dm_) > 0:  # aves update value
-             lev_G.dm_ = dm_  # aves, proj agg+'m = m + dm?
-             # project box by decomposed d-val per Et: np.sqrt(sum([m**2 for m in m_]) /L)?
-             # get dy,dx: frame.vert A or frame.latuple [Dy,Dx]?
+         hm_ = hG.vert[0]  # need to add other aves?
+         hm_ = weigh_m_(hm_, sum(hm_), ave)
+         dm_ = hm_ - lev_G.aves
+         if sum(dm_) > 0:  # update
+             lev_G.aves = hm_  # proj agg+'m = m + dm?
+             # project box if val_* dy,dx: frame.vert A or frame.latuple [Dy,Dx]?
+             # add cost params: distance, len? min,max coord filters
              hG = lev_G  # replace higher lev
          else: break
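Note that weigh_m_ returns a (w_, M) pair, while the new line above binds that whole pair to hm_ before subtracting lev_G.aves; a hedged reading of the intended call, with the unpacking and re-weighting as my assumptions:

    w_, M = weigh_m_(hm_, sum(hm_), ave)  # assumed unpack; the commit assigns the tuple to hm_
    hm_ = hG.vert[0] * w_                 # assumed: weights applied to matches before the aves comparison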
10 changes: 3 additions & 7 deletions frame_2D_alg/comp_slice.py
@@ -175,7 +175,7 @@ def comp_latuple(_latuple, latuple, _n,n): # 0der params, add dir?
      I, G, M, D, L, (Dy, Dx) = latuple
      rn = _n / n

-     I*=rn; dI = _I - I; mI = ave_dI -dI; MI = max(_I,I)  # vI = mI - ave
+     I*=rn; dI = _I - I; mI = ave_dI -dI; MI = max(_I,I)  # vI = mI - ave)
      G*=rn; dG = _G - G; mG = min(_G, G); MG = max(_G,G)  # vG = mG - ave_mG
      M*=rn; dM = _M - M; mM = min(_M, M); MM = max(_M,M)  # vM = mM - ave_mM
      D*=rn; dD = _D - D; mD = min(_D, D); MD = max(_D,D)  # vD = mD - ave_mD
@@ -184,10 +184,8 @@

      d_ = np.array([dI, dG, dM, dD, dL, dA])
      m_ = np.array([mI/ MI, mG/ MG, mM/ MM, mD/ MD, mL/ ML, mA])  # angle is already normal
-     D = np.sqrt(sum([d**2 for d in d_]) /6)  # m/M - weighted sum of 6 pars
-     M = np.sqrt(sum([m**2 for m in m_]) /6)
-
-     return np.array([m_,d_]), np.array([M,D])
+     return np.array([m_,d_]), np.array([sum(m_),sum(d_)])

def comp_vert(_i_,i_, rn=.1, dir=1): # i_ is ds, dir may be -1

@@ -196,10 +194,8 @@ def comp_vert(_i_,i_, rn=.1, dir=1): # i_ is ds, dir may be -1
      _a_,a_ = np.abs(_i_), np.abs(i_)
      m_ = np.divide( np.minimum(_a_,a_), reduce(np.maximum, [_a_, a_, 1e-7]))  # rms
      m_[(_i_<0) != (d_<0)] *= -1  # m is negative if comparands have opposite sign
-     M = np.sqrt(sum([m**2 for m in m_]) /6)  # m/M - weighted sum of 6 pars
-     D = np.sqrt(sum([d**2 for d in d_]) /6)  # same weighting?
-
-     return np.array([m_,d_]), np.array([M,D])  # Et
+     return np.array([m_,d_]), np.array([sum(m_),sum(d_)])  # Et
'''
sequential version:
md_, dd_ = [],[]
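Both return statements above replace the root-mean-square combination with plain sums, so Et now grows with the number of matched params rather than tracking their quadratic mean; a numeric contrast with hypothetical normalized matches:

    import numpy as np
    m_ = np.array([.5, .2, .8, .1, .6, .3])
    np.sqrt(sum(m_**2) / 6)  # old combined M ~ 0.48
    sum(m_)                  # new combined M = 2.5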
3 changes: 3 additions & 0 deletions frame_2D_alg/deprecated/25.2.py
@@ -79,4 +79,7 @@ def comp_md_t(_d_t,d_t, rn=.1, dir=1): # dir may be -1
      m_t_ += [md_]; M += np.sqrt(sum([m**2 for m in md_]) / len(d_))  # weighted sum
      d_t_ += [dd_]; D += dd_.sum()  # same weighting?

+     M = sum([ np.sqrt( sum([m**2 for m in m_]) /len(m_)) for m_ in m_t])  # weigh M,D by ind_m / tot_M
+     D = sum([ np.sqrt( sum([d**2 for d in d_]) /len(d_)) for d_ in d_t])
+
      return np.array([m_t_,d_t_],dtype=object), np.array([M,D])  # [m_,d_], Et
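For contrast, the lines added here preserve in deprecated code the per-fork RMS combination that the live comp_lay just dropped: M = sum over forks of sqrt(sum(m^2) / len(m_)). A numeric check with hypothetical fork arrays:

    import numpy as np
    m_t = [np.array([.5, .2]), np.array([.8, .1, .6, .3, .4, .2])]   # hypothetical [mext, mver]
    M = sum(np.sqrt(sum(m**2 for m in m_) / len(m_)) for m_ in m_t)  # ~ 0.85 here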
34 changes: 20 additions & 14 deletions frame_2D_alg/vect_edge.py
@@ -48,7 +48,7 @@ def __init__(l, **kwargs):
          l.root = kwargs.get('root', None)  # higher node or link
          l.node_ = kwargs.get('node_', [])  # concat across fork tree
          l.link_ = kwargs.get('link_', [])
-         l.m_d_t = kwargs.get('m_d_t', [[np.zeros(2),np.zeros(2)],[np.zeros(6),np.zeros(6)]])  # [[mext,mver],[dext,dver]], sum across fork tree
+         l.m_d_t = kwargs.get('m_d_t', [[np.zeros(2),np.zeros(6)],[np.zeros(2),np.zeros(6)]])  # [[mext,mver],[dext,dver]], sum across fork tree
          # altL = CLay from comp altG
          # i = kwargs.get('i', 0)  # lay index in root.node_, link_, to revise olp
          # i_ = kwargs.get('i_',[])  # priority indices to compare node H by m | link H by d
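The corrected default pairs a 2-param ext array with a 6-param vert array inside each fork, matching the [[mext,mver],[dext,dver]] comment; a shape check:

    import numpy as np
    m_d_t = [[np.zeros(2), np.zeros(6)],   # m fork: [mext, mver]
             [np.zeros(2), np.zeros(6)]]   # d fork: [dext, dver]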
@@ -89,16 +89,15 @@ def comp_lay(_lay, lay, rn, root, dir=1): # unpack derH trees down to numerical
      i_t = [i_ * rn * dir for i_ in lay.m_d_t[1]]  # i_ is ds, scale- and direction- normalized
      d_t = [_i_ - i_ for _i_,i_ in zip(_i_t,i_t)]  # [dext,dver]

-     _a_t, a_t = [(np.abs(_i_), np.abs(i_)) for _i_,i_ in zip(_i_t,i_t)]
-     m_t = [np.minimum(_a_,a_)/ reduce(np.maximum,[_a_,a_,1e-7]) for _a_,a_ in zip(_a_t,a_t)]  # match = min/max comparands
-     for fv, (_i_,i_) in enumerate(zip(_i_t, i_t)):  # [dext,vert]
-         m_t[fv][(_i_<0) != (i_<0)] *= -1  # m is negative if comparands have opposite sign
+     _a_t = [np.abs(_i_) for _i_ in _i_t]; a_t = [np.abs(i_) for i_ in i_t]
+     m_t = [np.minimum(_a_,a_) / reduce(np.maximum,[_a_,a_,1e-7]) for _a_,a_ in zip(_a_t,a_t)]  # match = min/max comparands
+     for f, (_i_,i_) in enumerate(zip(_i_t, i_t)):  # [dext,vert]
+         m_t[f][(_i_<0) != (i_<0)] *= -1  # m is negative if comparands have opposite sign
      m_d_t = [m_t,d_t]  # [[mext,mvert],[dext,dvert]]
      node_ = list(set(_lay.node_+ lay.node_))  # concat
      link_ = _lay.link_ + lay.link_
-     M = sum([ np.sqrt( sum([m**2 for m in m_]) /len(m_)) for m_ in m_t])  # weigh M,D by ind_m / tot_M
-     D = sum([ np.sqrt( sum([d**2 for d in d_]) /len(d_)) for d_ in d_t])
+     M = sum(m_t[0]) + sum(m_t[1])
+     D = sum(d_t[0]) + sum(d_t[1])
      Et = np.array([M, D, 2 if len(i_t)==1 else 8, (_lay.Et[3]+lay.Et[3])/2])  # n comp params = 2 in dext, 6 in vert
      if root: root.Et += Et

@@ -220,19 +219,19 @@ def cluster_PP_(N_, fd):
      N_,L_,Et = comp_node_(edge.node_)
      edge.link_ += L_
      if Val_(Et, _Et=Et, fd=0) > 0:  # cluster eval
-         mlay = sum_lay_(L_, edge); derH = [[mlay]]  # single nested mlay
+         derH = [[sum_lay_(L_,edge,)]]  # single nested mlay
          if len(N_) > ave_L:
              cluster_PP_(N_, fd=0)
              if Val_(Et, _Et=Et, fd=0) > 0:  # likely not from the same links
                  L2N(L_,edge)  # comp dPP_:
                  lN_,lL_,dEt = comp_link_(L_,Et)
                  if Val_(dEt, _Et=Et, fd=1) > 0:
-                     lay_t = sum_H(lL_, edge, fd=1)  # two-layer dfork
-                     derH = [[mlay,lay_t[0]], [[],lay_t[1]]]  # two-layer derH
+                     derH[0] += [comb_H_(lL_, edge,fd=1)]  # dlay, or sum_lay_?
                      if len(lN_) > ave_L:
                          cluster_PP_(lN_, fd=1)
+                 else:
+                     derH[0] += [[]]  # empty dlay
+             else: derH[0] += [[]]
      edge.derH = derH

def comp_node_(_N_, L=0): # rng+ forms layer of rim and extH per N, appends N_,L_,Et, ~ graph CNN without backprop
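The edge clustering above now mirrors cross_comp: a single nested mlay, with the dfork lay or an empty placeholder appended into the same derH[0]. The resulting edge.derH shapes, with strings as hypothetical stand-ins for CLay instances:

    edge.derH == [['mlay', 'dlay']]  # dfork passed eval
    edge.derH == [['mlay', []]]      # dfork failed: empty dlay placeholder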
@@ -350,7 +349,7 @@ def comp_dext(_dext, dext, rn, dir=1):

  def comp_N(_N,N, rn, angle=None, dist=None, dir=1):  # dir if fd, Link.derH=dH, comparand rim+=Link

-     fd = isinstance(N,CL); derH=[]  # compare links, relative N direction = 1|-1
+     fd = isinstance(N,CL); dderH=[]  # compare links, relative N direction = 1|-1
      # comp externals:
      if fd:
          _L, L = _N.dist, N.dist; L*=rn; dL = _L - L; mL = min(_L,L) / max(_L,L) - ave_L  # rm
@@ -377,9 +376,9 @@ def comp_N(_N,N, rn, angle=None, dist=None, dir=1): # dir if fd, Link.derH=dH,
      Et += np.array([lEt[0], lEt[1], 2, 0])  # same olp?
      vert += dLat
      if M > ave and (len(N.derH) > 2 or isinstance(N,CL)):  # else derH is redundant to dext,vert
-         derH = comp_H(_N.derH, N.derH, rn, Link, Et, fd)  # comp shared layers, if any
+         dderH = comp_H(_N.derH, N.derH, rn, Link, Et, fd)  # comp shared layers, if any
      # comp_node_(node_|link_)
-     Link.derH = [CLay(root=Link,Et=Et,node_=[_N,N],link_=[Link], m_d_t=[[dext[0],vert[0]],[[dext[1],vert[1]]]]), *derH]
+     Link.derH = [CLay(root=Link,Et=Et,node_=[_N,N],link_=[Link], m_d_t=[[dext[0],vert[0]],[dext[1],vert[1]]]), *dderH]
      # spec:
      if not fd and _N.altG and N.altG:  # if alt M?
          Link.altL = comp_N(_N.altG, N.altG, _N.altG.Et[2] / N.altG.Et[2])
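The m_d_t fix above removes a stray extra bracket around the d fork, so Link.derH[0] now matches the CLay default declared in __init__; the assumed layout:

    m_d_t = [[dext[0], vert[0]],  # m fork: [mext, mvert]
             [dext[1], vert[1]]]  # d fork: [dext, dvert], no longer wrapped in an extra list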
@@ -440,6 +439,13 @@ def sum_lay_(link_, root):
      for link in link_: lay0.add_lay(link.derH[0])
      return lay0

+ def comb_H_(L_, root, fd):
+
+     derH = sum_H(L_,root,fd=fd)
+     Lay = CLay(root=root)
+     for lay in derH: Lay.add_lay(lay)
+     return Lay

  def sum_H(Q, root, rev=0, fc=0, fd=0):  # sum derH in link_|node_
      DerH = []
      for e in Q: add_H(DerH, e.derH, root, rev, fc, fd)
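comb_H_ reduces the layer list returned by sum_H to one accumulated CLay, which is what cross_comp and vectorize now append as a single combined lay; a usage sketch under that assumption:

    mlay = comb_H_(L_, root, fd=1)  # one CLay folding every derH layer of the links in L_
    derH = [[mlay]]                 # vs sum_H(L_, root, fd=1) -> [lay0, lay1, ...], one CLay per layer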
