Commit 716e97b
edits with Chee
boris-kz committed Dec 5, 2024
1 parent f08e7a6
Showing 3 changed files with 75 additions and 69 deletions.
74 changes: 39 additions & 35 deletions frame_2D_alg/vectorize_edge_blob/agg_recursion.py
@@ -8,9 +8,8 @@
from comp_slice import comp_latuple, comp_md_
from trace_edge import comp_node_, comp_link_, sum2graph, get_rim, CH, CG, ave, ave_d, ave_L, vectorize_root, comp_area, extend_box
'''
- Cross-compare and cluster edge blobs within a frame,
- potentially unpacking their node_s first,
- with recursive agglomeration
+ Cross-compare and cluster Gs within a frame, potentially unpacking their node_s first,
+ alternating agglomeration and centroid clustering.
'''

def agg_cluster_(frame): # breadth-first (node_,L_) cross-comp, clustering, recursion
@@ -22,10 +21,10 @@ def cluster_eval(G, N_, fd):
sG_ = cluster_N_(G, pL_, fd) # optionally divisive clustering
frame.subG_ = sG_
for sG in sG_:
- if len(sG.node_) > ave_L:
+ if len(sG.subG_) > ave_L:
find_centroids(sG) # centroid clustering in sG.node_ or subG_?
'''
- cross-comp converted edges, then GGs, GGGs, etc, interlaced with exemplar selection
+ cross-comp G_) GG_) GGG_., interlaced with exemplar centroid selection
'''
N_,L_, (m,d,r) = comp_node_(frame.subG_) # cross-comp exemplars, extrapolate to their node_s?
if m > ave * r:
@@ -90,10 +89,11 @@ def cluster_N_(root, L_, fd, nest=1): # top-down segment L_ by >ave ratio of L.
if et[0] > et[2] * ave * nest: # rdn incr/ dist decr
G_ += [sum2graph(root, Gt, fd, nest)]
else:
+ # unpack Gt
for n in node_: n.root_.pop()
return G_

- ''' Hierarchical clustering should alternate between two phases: generative by connectivity and compressive by centroid.
+ ''' Hierarchical clustering should alternate between two phases: generative via connectivity and compressive via centroid.
Connectivity clustering terminates at effective contours: alt_Gs, beyond which cross-similarity is not likely to continue.
Next cross-comp is discontinuous and should be selective, for well-defined clusters: stable and likely recurrent.
@@ -102,13 +102,14 @@ def cluster_N_(root, L_, fd, nest=1): # top-down segment L_ by >ave ratio of L.
Only centroids (exemplars) need to be cross-compared on the next connectivity clustering level, representing their nodes.
So connectivity clustering is a generative learning phase, forming new derivatives and structured composition levels,
- while centroid clustering is a compressive phase, reducing multiple similar comparands to a single exemplar.
- '''
+ while centroid clustering is a compressive phase, reducing multiple similar comparands to a single exemplar. '''
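A minimal runnable sketch of this alternation, for orientation only: connectivity_cluster, refine_centroid and the flat feature vectors, links and thresholds below are hypothetical stand-ins for cluster_N_, find_centroids and their CG graphs, not the project's implementation.

import numpy as np

def connectivity_cluster(feats, links, thresh):  # generative phase: union nodes joined by above-thresh match links
    parent = list(range(len(feats)))
    def find(i):
        while parent[i] != i: i = parent[i]
        return i
    for i, j, m in links:  # (node index, node index, match) triples
        if m > thresh: parent[find(i)] = find(j)
    groups = {}
    for i in range(len(feats)): groups.setdefault(find(i), []).append(i)
    return list(groups.values())

def refine_centroid(member_, feats, thresh, iters=10):  # compressive phase: average members, re-test all nodes, repeat
    members = set(member_)
    for _ in range(iters):
        C = feats[sorted(members)].mean(axis=0)  # centroid = mean of current members
        new = {i for i in range(len(feats)) if np.abs(feats[i] - C).sum() < thresh}
        if new == members: break  # membership stopped changing: converged
        members = new
    return C, members

# alternation: cluster by connectivity, compress each cluster to a centroid exemplar,
# then only the exemplars are cross-compared on the next composition level:
feats = np.array([[0.], [.2], [.1], [5.], [5.3]])
links = [(0, 1, .9), (1, 2, .8), (3, 4, .7)]  # toy above-ave matches
exemplar_ = [refine_centroid(g, feats, thresh=1.)[0] for g in connectivity_cluster(feats, links, thresh=.5)]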

def find_centroids(graph):

def centroid(dnode_, node_, C=None): # sum|subtract and average Rim nodes

- if C is None: C = CG()
+ if C is None:
+ C = CG(); C.L = 0; C.M = 0 # setattr ave len node_ and summed match to nodes
for n in dnode_:
s = n.sign; n.sign=1 # single-use sign
C.n += n.n * s; C.Et += n.Et * s; C.rng = n.rng * s; C.aRad += n.aRad * s
@@ -120,7 +121,6 @@ def centroid(dnode_, node_, C=None): # sum|subtract and average Rim nodes
k = len(dnode_); C.n/=k; C.Et/=k; C.latuple/=k; C.mdLay/=k; C.aRad/=k
if C.derH: C.derH.norm_(k) # derH/=k
C.box = reduce(extend_box, (n.box for n in node_))
- C.M = 0 # summed match to nodes
return C

def comp_C(C, N): # compute match without new derivatives: global cross-comp is not directional
@@ -134,27 +134,30 @@ def comp_C(C, N): # compute match without new derivatives: global cross-comp is not directional
# comp node_, comp altG from converted adjacent flat blobs?
return mL + mA + mLat + mLay + mH

- def centroid_cluster(N, clustered_): # refine and extend cluster with extN_
+ def centroid_cluster(N): # refine and extend cluster with extN_

- _N_ = {n for L,_ in N.rim for n in L.nodet}
+ _N_ = {n for L,_ in N.rim for n in L.nodet if not n.fin}
n_ = _N_| {N} # include seed node
C = centroid(n_,n_)
while True:
- N_, negN_, extN_, M, vM = [],[],[],0,0 # included, removed, extended nodes
+ N_,negN_,extN_, M, dM, extM = [],[],[], 0,0,0 # included, removed, extended nodes and values
for _N in _N_:
- if _N in clustered_: continue
m = comp_C(C,_N)
vm = m - ave # deviation
if vm > 0:
- extN_ += [link.nodet[0] if link.nodet[1] is _N else link.nodet[1] for link,_ in _N.rim] # next comp to C
- N_ += [_N]; M += m; _N.M = m # to sum in C
- if _N not in C.node_:
- vM += vm; clustered_.add(_N) # only new nodes
- elif _N in C.node_:
- _N.sign=-1; negN_+=[_N]; vM += -vm # to subtract from C, vM += abs m deviation
- clustered_.remove(_N) # if exclusive
- if vM > ave: # new match, terminate (refine,extend) if low
- extN_ = set(extN_) - clustered_ # exclude clustered Ns
+ N_ += [_N]; M += m
+ if _N.m: dM += m - _N.m # was in C.node_
+ else: dM += vm # new node
+ _N.m = m # to sum in C
+ for link, _ in _N.rim:
+ n = link.nodet[0] if link.nodet[1] is _N else link.nodet[1]
+ if n.fin or n.m: continue # in other C or in C.node_
+ extN_ += [n]; extM += n.derH.Et[0] # add external Ns for next loop
+ elif _N.m: # was in C.node_
+ _N.sign=-1; _N.m = 0; negN_+=[_N]; dM += -vm # dM += abs m deviation
+ # subtract from C
+ if dM > ave and M + extM > ave: # update val and reform val, terminate reforming if low
+ extN_ = set(extN_)
dN_ = extN_ | set(negN_)
if dN_: # recompute if any changes in node_
C = centroid(dN_,N_,C)
@@ -163,23 +166,23 @@ def centroid_cluster(N, clustered_): # refine and extend cluster with extN_
else:
if C.M > ave * 10:
for n in C.node_:
- n.root_ += [C]; delattr(n, "sign")
+ n.fin = 1; n.root_ += [C]; delattr(n,"sign")
return C # centroid cluster
else: # unpack C.node_
- for n in C.node_:
- clustered_.remove(n); n.M = 0
+ for n in C.node_: n.m = 0
return N # keep seed node

# find representative centroids for complemented Gs: m-core + d-contour, initially within an edge
N_ = sorted(graph.subG_, key=lambda n: n.Et[0], reverse=True)
subG_, clustered_ = [], set()
- for N in N_: N.sign = 1
- for i, N in enumerate(N_): # connectivity cluster may have exemplar centroids
- if N not in clustered_:
+ for N in N_:
+ N.sign, N.m, N.fin = 1, 0, 0 # setattr: C update sign, inclusion val, prior C inclusion flag
+ for i, N in enumerate(N_): # replace some of connectivity cluster by exemplar centroids
+ if not N.fin: # not in prior C
if N.Et[0] > ave * 10:
- subG_ += [centroid_cluster(N, clustered_)] # extend from N.rim, return C if packed else N
+ subG_ += [centroid_cluster(N)] # extend from N.rim, return C if packed else N
else: # the rest of N_ M is lower
- subG_ += [N for N in N_[i:] if N not in clustered_]
+ subG_ += [N for N in N_[i:] if not N.fin]
break
graph.subG_ = subG_ # mix of Ns and Cs: exemplars of their network?
if len(graph.subG_) > ave_L:
@@ -195,7 +198,8 @@ def centroid_cluster(N, clustered_): # refine and extend cluster with extN_
if frame.subG_: # converted edges
subG_ = []
for edge in frame.subG_:
- find_centroids(edge) # here because trace_edge doesn't have find_centroids
- subG_ += edge.subG_
- frame.subG_ = subG_ # edge is not a connectivity cluster, unpack by default
- agg_cluster_(frame) # connectivity clustering
+ if edge.subG_: # or / and edge Et?
+ find_centroids(edge) # no find_centroids in trace_edge
+ subG_ += edge.subG_ # unpack edge, or keep if connectivity cluster, or in flat blob altG_?
+ frame.subG_ = subG_
+ agg_cluster_(frame) # connectivity clustering
64 changes: 33 additions & 31 deletions frame_2D_alg/vectorize_edge_blob/comp_slice.py
@@ -32,8 +32,8 @@
'''

ave_dI = ave_inv = 20 # ave inverse m, change to Ave from the root intra_blob?
- ave, aved_d = 5, 5 # ave direct m, change to Ave_min from the root intra_blob?
- aves = ave_mI, ave_mG, ave_mM, ave_mMa, ave_mA, ave_mL = ave, 10, 2, .1, .2, 2
+ ave, ave_d = 5, 5 # ave direct m, change to Ave_min from the root intra_blob?
+ aves = ave, ave_mG, ave_mM, ave_mMa, ave_mA, ave_mL = 5, 10, 2, .1, .2, 2
PP_aves = ave_PPm, ave_PPd = 50, 50
P_aves = ave_Pm, ave_Pd = 10, 10
ave_Gm = 50
@@ -46,35 +46,36 @@ def __init__(l, nodet, span, angle, yx, mdLay=None, latuple=None, Et=None, root=None,
super().__init__()

l.nodet = nodet # e_ in kernels, else replaces _node,node: not used in kernels?
- l.latuple = np.array([.0,.0,.0,.0,.0,np.zeros(2)], dtype=object) if latuple is None else latuple # sum node_
- l.mdLay = np.array([np.zeros(12),np.zeros(2),0],dtype=object) if mdLay is None else mdLay
+ l.latuple = np.array([.0,.0,.0,.0,.0, np.zeros(2)], dtype=object) if latuple is None else latuple # sum node_
+ l.mdLay = np.array([np.zeros(12), np.zeros(2), 0], dtype=object) if mdLay is None else mdLay
l.angle = angle # dy,dx between node centers
l.span = span # distance between node centers
l.yx = yx # sum node_
l.Et = np.zeros(2) if Et is None else Et
l.root = root # PPds containing dP
- l.nmed = 0 # comp rng: n of mediating Ps between node_ Ps
l.lrim = []
l.prim = []
+ # l.med = 0 # comp rng: n of mediating Ps between node_ Ps
+ # n = 1?
def __bool__(l):
- return any(l.mdLay[0]) # l.mdLay.H
+ return any(l.mdLay[0]) # mdLay.H

def comp_md_(_md_, md_, rn=1, dir=1): # replace dir with rev?

- vm, vd, rm, rd = 0,0,0,0
+ M, D = 0, 0
derLay = []
for i, (_d, d) in enumerate(zip(_md_[1::2], md_[1::2])): # compare ds in md_ or ext
d *= rn # normalize by compared accum span
diff = (_d - d) * dir
match = min(abs(_d), abs(d))
if (_d < 0) != (d < 0): match = -match # negate if only one compared is negative
- vm += match - aves[i] # fixed param set?
- vd += diff
+ M += match # maybe negative
+ D += abs(diff) # potential compression
derLay += [match, diff] # flat
- prj_d = abs(vd) * (vm/ave)
- prj_m = vm - (prj_d / 2)
- return np.array([np.array(derLay, dtype=float), np.array([prj_m,prj_d], dtype=float), 1],dtype=object) # [md_, Et, n]
+ vD = D * (M / ave) # project by borrow from rel M
+ vM = M - vD / 2 # cancel by lend to D
+
+ return np.array([np.array(derLay, dtype=float), np.array([vM, vD], dtype=float), 1], dtype=object) # [md_, Et, n]
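A worked check of this projection, using ave = 5 as defined in this module; M and D are assumed totals for illustration:

M, D = 12., 8.      # assumed summed match and summed |diff|
vD = D * (M / ave)  # 8 * 12/5 = 19.2: D borrows value in proportion to relative match
vM = M - vD / 2     # 12 - 9.6 = 2.4: M is cancelled by half of what it lends to D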

def vectorize_root(frame):

Expand Down Expand Up @@ -118,12 +119,12 @@ def comp_P_(edge): # form links from prelinks
angle=[dy,dx]; distance=np.hypot(dy,dx)
rn = len(_P.dert_) / len(P.dert_)
md_ = comp_latuple(_P.latuple, P.latuple, rn)
- vm = sum(md_[::2]); vd = sum(np.abs(md_[1::2]))
- n = (len(_P.dert_)+len(P.dert_)) / 2 # der value = ave compared n?
- prj_d = abs(vd) * (vm/ave)
- prj_m = vm - prj_d / 2
- derLay = np.array([md_, np.array([prj_m,prj_d]), n], dtype=object)
- link = convert_to_dP(_P, P, derLay, angle, distance, fd=0)
+ M, D = md_[-2:]
+ vD = D * (M / sum(aves)) # borrow from projected M
+ vM = M - vD / 2 - sum(aves) # cancel by lend to D
+ n = (len(_P.dert_) + len(P.dert_)) / 2 # norm by ave compared n?
+ derLay = np.array([md_, np.array([vM, vD]), n], dtype=object)
+ link = convert_to_dP(_P,P, derLay, angle, distance, fd=0)
if link:
P.rim += [link]
del edge.pre__
Expand Down Expand Up @@ -213,20 +214,21 @@ def comp_latuple(_latuple, latuple, rn, fagg=0): # 0der params
_I, _G, _M, _Ma, _L, (_Dy, _Dx) = _latuple
I, G, M, Ma, L, (Dy, Dx) = latuple

- dI = _I - I*rn; mI = ave_dI - dI
- dG = _G - G*rn; mG = min(_G, G*rn) - ave_mG
- dM = _M - M*rn; mM = get_match(_M, M*rn) - ave_mM # M, Ma may be negative
- dMa= _Ma- Ma*rn; mMa = get_match(_Ma, Ma*rn) - ave_mMa
- dL = _L - L*rn; mL = min(_L, L*rn) - ave_mL
- mAngle, dAngle = comp_angle((_Dy,_Dx), (Dy,Dx))
- mAngle -= ave_mA
-
- ret = np.array([mL,dL,mI,dI,mG,dG,mM,dM,mMa,dMa,mAngle-aves[5],dAngle])
+ dI = _I - I*rn; mI = ave_dI - dI; vI = mI - ave
+ dG = _G - G*rn; mG = min(_G, G*rn); vG = mG - ave_mG
+ dM = _M - M*rn; mM = get_match(_M, M*rn); vM = mM - ave_mM # M, Ma may be negative
+ dMa= _Ma- Ma*rn; mMa = get_match(_Ma, Ma*rn); vMa = mMa - ave_mMa
+ dL = _L - L*rn; mL = min(_L, L*rn); vL = mL - ave_mL
+ mAngle,dAngle = comp_angle((_Dy,_Dx),(Dy,Dx)); vA = mAngle - ave_mA
+ # abs totals
+ tM = mI + mG + mM + mMa + mL + mAngle
+ tD = abs(dI) + abs(dG) + abs(dM) + abs(dMa) + abs(dL) + abs(dAngle)
+
+ ret = np.array([vL,dL, vI,dI, vG,dG, vM,dM, vMa,dMa, vA,dAngle, tM,tD])
if fagg: # add norm m,d, ret=[ret,Ret]:
- mval, dval = sum(ret[::2]),sum(ret[1::2])
- prj_d = abs(dval) * (mval/ave)
- prj_m = mval - prj_d
- ret = np.array([ret, np.array([prj_m,prj_d]), 1], dtype=object) # if fagg only
+ prj_d = tD * (tM / sum(aves))
+ prj_m = tM - prj_d / 2 - sum(aves)
+ ret = np.array([ret, np.array([prj_m, prj_d]), 1], dtype=object) # if fagg only
return ret
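A worked check of the fagg branch above: the aves defined at the top of this file sum to 5 + 10 + 2 + .1 + .2 + 2 = 19.3, and tM, tD are assumed totals for illustration:

tM, tD = 25., 10.
prj_d = tD * (tM / sum(aves))       # 10 * 25/19.3 ≈ 12.95
prj_m = tM - prj_d / 2 - sum(aves)  # 25 - 6.48 - 19.3 ≈ -0.78: below-ave net match despite tM > 0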

def get_match(_par, par):
6 changes: 3 additions & 3 deletions frame_2D_alg/vectorize_edge_blob/trace_edge.py
@@ -52,7 +52,7 @@ def __init__(He, n=0, H=None, Et=None, node_=None, md_t=None, root=None, i=None,
super().__init__()
He.H = [] if H is None else H # nested derLays | md_ in md_C, empty in bottom layer
He.n = n # total number of params compared to form derH, to normalize comparands
- He.Et = np.zeros(3) if Et is None else Et # summed from links
+ He.Et = [] if Et is None else Et # summed from links
He.node_ = [] if node_ is None else node_ # concat bottom nesting order if CG, may be redundant to G.node_
He.md_t = [] if md_t is None else md_t # derivation layer in H
He.root = None if root is None else root # N or higher-composition He
@@ -307,8 +307,8 @@ def comp_node_(_N_): # rng+ forms layer of rim and extH per N, appends N_,L_,Et
if _nrim & nrim: # indirectly connected Gs,
continue # no direct match priority?
M = ( (_G.mdLay[1][0] + G.mdLay[1][0]) * icoef**2 # internal vals are less predictive in external comp
- + (_G.derH.Et[0] + G.derH.Et[0] ) * icoef
- + (_G.extH.Et[0] + G.extH.Et[0] ) )
+ + ((_G.derH.Et[0] if _G.derH.Et else 0) + (G.derH.Et[0] if G.derH.Et else 0) ) * icoef
+ + ((_G.extH.Et[0] if _G.extH.Et else 0) + (G.extH.Et[0] if G.extH.Et else 0) ))
if dist < max_dist * (radii * icoef**3) * M:
Link = comp_N(_G,G, rn,angle=[dy,dx],dist=dist)
L_ += [Link] # include -ve links
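A worked check of the range gate above; all numbers are assumed for illustration (icoef and max_dist are defined elsewhere in this module, not shown in this hunk):

max_dist, icoef = 2., .5                  # assumed constants
radii, M, dist = 10., 1.6, 3.             # summed radii, weighted match, center distance
gate = max_dist * (radii * icoef**3) * M  # 2 * (10 * .125) * 1.6 = 4.0
comp = dist < gate                        # True: stronger match extends the comparable range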