Commit d1fcd23: edits with Chee
boris-kz committed Nov 30, 2024 (1 parent: c499ac4)
Showing 2 changed files with 62 additions and 63 deletions.
52 changes: 27 additions & 25 deletions frame_2D_alg/vectorize_edge_blob/agg_recursion.py
@@ -2,6 +2,7 @@
sys.path.append("..")
import numpy as np
from copy import copy, deepcopy
from functools import reduce
from itertools import combinations
from frame_blobs import CBase, frame_blobs_root, intra_blob_root, imread, unpack_blob_
from comp_slice import comp_latuple, comp_md_
@@ -62,7 +63,7 @@ def cluster_N_(root, L_, fd, nest=1): # top-down segment L_ by >ave ratio of L.
Gt_ = []
for N in N_: # cluster current distance segment
if len(N.root_) > nest: continue # merged, root_[0] = edge
node_,link_, et = set(),set(), np.array([.0,.0, 1.0])
node_,link_, et = set(),set(), np.array([.0,.0,1.])
Gt = [node_,link_,et, min_dist]; N.root_ += [Gt]
_eN_ = {N}
while _eN_:
@@ -105,20 +106,8 @@ def cluster_N_(root, L_, fd, nest=1): # top-down segment L_ by >ave ratio of L.
'''
def get_exemplar_(frame):

def comp_cN(_N, N): # compute match without new derivatives: global cross-comp is not directional

rn = _N.n / N.n
mL = min(len(_N.node_),len(N.node_)) - ave_L
mA = comp_area(_N.box, N.box)[0]
mLat = comp_latuple(_N.latuple, N.latuple, rn,fagg=1)[1][0]
mLay = comp_md_(_N.mdLay[0], N.mdLay[0], rn)[1][0]
HEt = _N.derH.comp_H(N.derH, rn).Et if _N.derH and N.derH else np.zeros(3)
# comp node_, comp altG from converted adjacent flat blobs?

return mL+mA+mLat+mLay+HEt[0], HEt[2]

def xcomp_(N_): # initial global cross-comp
for g in N_: g.M, g.Mr, g.sign = 0,0,1 # setattr
for g in N_: g.M, g.Mr, g.sign = 0,0,1 # setattr Rim vals for exemplars, then converted to extH

for _G, G in combinations(N_, r=2):
rn = _G.n/G.n
@@ -130,33 +119,46 @@ def xcomp_(N_): # initial global cross-comp
if vM > 0:
g.perim.add(link) # loose match
if vM > ave * r: # strict match
g.Rim.add((link)); g.M+=m; g.Mr+=r
g.Rim.add(link); g.M+=m; g.Mr+=r

def centroid(node_, C=CG()): # sum|subtract and average Rim nodes

for n in node_:
s = n.sign
C.n += n.n * s; C.Et += n.Et * s; C.rng = n.rng * s; C.aRad += n.aRad * s
C.latuple += n.latuple * s; C.mdLay += n.mdLay * s
C.derH.add_H(n.derH); C.extH.add_H(n.extH); C.box = extend_box(C.box,n.box)
# need to add sign in add_H and extend_box to remove instead adding?
if n.derH: C.derH.add_H(n.derH, sign=s)
if n.extH: C.extH.add_H(n.extH, sign=s)
# get averages:
k = len(node_); C.n/=k; C.Et/=k; C.latuple/=k; C.mdLay/=k; C.aRad/=k; C.derH.norm_(k) # derH/=k
k = len(node_); C.n/=k; C.Et/=k; C.latuple/=k; C.mdLay/=k; C.aRad/=k
if C.derH: C.derH.norm_(k) # derH/=k
return C

def comp_C(_N, N): # compute match without new derivatives: global cross-comp is not directional

rn = _N.n / N.n
mL = min(len(_N.node_),len(N.node_)) - ave_L
mA = comp_area(_N.box, N.box)[0]
mLat = comp_latuple(_N.latuple, N.latuple, rn,fagg=1)[1][0]
mLay = comp_md_(_N.mdLay[0], N.mdLay[0], rn)[1][0]
mH = _N.derH.comp_H(N.derH, rn).Et[0] if _N.derH and N.derH else 0
# comp node_, comp altG from converted adjacent flat blobs?

return mL+mA+mLat+mLay+mH

def refine_by_centroid(N): # refine Rim to convergence

_perim,_M = N.perim, N.M # no use for Mr
node_ = {n for L in N.Rim for n in L.nodet } | {N}
C = centroid(node_)
node_ = {n for L in N.Rim for n in L.nodet} | {N}
C = centroid(node_); C.box = reduce(extend_box, (n.box for n in node_))
while True:
dnode_, Rim, perim, M = set(), set(), set(), 0
for link in _perim:
_N, m = link.nodet[0] if link.nodet[1] is N else link.nodet[1], link.derH.Et[0]
mm,rr = comp_cN(C,_N)
if mm > ave * rr:
mm = comp_C(C,_N)
if mm > ave:
perim.add(link)
if mm > ave * rr * 2:
if mm > ave * 2:
Rim.add(link); M += m # copy link from perim to Rim
if _N not in node_:
node_.add(_N); _N.sign=1; dnode_.add(_N)
@@ -165,7 +167,7 @@
node_.remove(_N); _N.sign=-1; dnode_.add(_N) # sign=-1 to remove in centroid()
if M / _M < 1.2:
break # convergence
C = centroid(dnode_, C)
C = centroid(dnode_,C); C.box = reduce(extend_box, (n.box for n in node_))
_Rim,_perim,_M = Rim, perim, M

N.Rim, N.perim, N.M = list(Rim),list(perim), M
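For orientation, the loop above follows a k-means-like refinement: average current members into a centroid, re-test every candidate against it, and stop once total match improves by less than the convergence ratio. A simplified self-contained sketch of that pattern (thresholds and names are assumptions, not repo code):

import numpy as np

ave = 1.0  # hypothetical match filter

def refine(candidates, seed):  # candidates: list of 1-D arrays, seed: index of the initial node
    members = {seed}
    _M = 1e-10                                                   # previous total match, avoids zero division
    while True:
        C = np.mean([candidates[i] for i in members], axis=0)    # centroid of current members
        M, new_members = 0.0, set()
        for i, vec in enumerate(candidates):
            m = float(np.minimum(np.abs(C), np.abs(vec)).sum())  # match of candidate to centroid
            if m > ave * 2:                                      # strict match: keep in the cluster
                new_members.add(i); M += m
        if M / _M < 1.2:                                         # convergence: < 20% gain in total match
            break
        members, _M = new_members, M
    return members

In the actual code the comparison is comp_C against the running centroid C, and removed nodes get sign=-1 so centroid() can subtract rather than add them.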
Expand Down Expand Up @@ -195,7 +197,7 @@ def prune_overlap(N_): # select Ns with M > ave * Mr
refine_by_centroid(N) # refine N.Rim
exemplar_ = []
for N in N_:
if eval_overlap(N) and N.M + N.Et[0] > ave * (N.Et[2] + N.Mr/len(N.Rim)): # normalize Mr
if eval_overlap(N) and N.M + N.Et[0] > ave * (N.Et[2] + N.Mr/(len(N.Rim)+1)): # normalize Mr
exemplar_ += [N]
return exemplar_

73 changes: 35 additions & 38 deletions frame_2D_alg/vectorize_edge_blob/trace_edge.py
@@ -8,16 +8,15 @@
import numpy as np

'''
This code is initially for clustering segments within edge: high-gradient blob, but it's far too complex for that.
That's because this is a prototype for open-ended compositional recursion: clustering blobs, graphs of blobs, etc.
We will later prune it down to lighter edge-specific version.
This code is initially for clustering segments within edge: high-gradient blob, but too complex for that.
It's mostly a prototype for open-ended compositional recursion: clustering blobs, graphs of blobs, etc.
-
Primary incremental-range (rng+) fork cross-comp leads to clustering edge segments, initially PPs, that match over < max distance.
Secondary incr-derivation (der+) fork cross-compares links from primary cross-comp, if >ave ~(abs_diff * primary_xcomp_match):
1st incremental-range (rng+) fork cross-comp leads to clustering edge segments, initially PPs, that match over < max distance.
2nd incremental-derivation (der+) fork cross-comps links from primary cross-comp, if >ave ~(abs_diff * primary_xcomp_match):
variance patterns borrow value from co-projected match patterns, because their projections cancel-out.
-
Thus graphs should be assigned adjacent alt-fork (der+ to rng+) graphs, to which they lend predictive value.
But alt match patterns borrow already borrowed value, which are too tenuous to track, we use average borrowed value.
But alt match patterns borrow already borrowed value, which is too tenuous to track, we use average borrowed value.
Clustering criterion within each fork is summed match of >ave vars (<ave vars are not compared and don't add comp costs).
-
Clustering is exclusive per fork,ave, with fork selected per variable | derLay | aggLay
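The fork logic sketched in this docstring reduces to a simple pattern: compare only variables that clear an average filter, sum their match, and link nodes whose summed match clears a higher filter. A self-contained toy illustration of that criterion (not code from this repo; ave and ave_match are hypothetical filters):

from itertools import combinations

ave = 1.0        # hypothetical per-variable filter: <ave vars are not compared
ave_match = 5.0  # hypothetical link-forming filter on summed match

def comp_pars(_pars, pars):  # cross-comp two parameter tuples
    m, d = 0.0, 0.0
    for _p, p in zip(_pars, pars):
        if min(abs(_p), abs(p)) < ave:
            continue                   # skip weak vars: no comp cost, no match
        m += min(abs(_p), abs(p))      # match: shared magnitude, rng+ criterion
        d += abs(_p - p)               # difference: input to the der+ fork
    return m, d

def link_nodes(nodes):  # nodes: list of parameter tuples
    links = []
    for i, j in combinations(range(len(nodes)), 2):
        m, d = comp_pars(nodes[i], nodes[j])
        if m > ave_match:              # above-filter match forms a clustering link
            links.append((i, j, m, d))
    return links

In the repo the per-variable comparisons are handled by comp_latuple, comp_md_ and derH.comp_H rather than raw tuples, but the filter-then-sum criterion is the same.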
Expand Down Expand Up @@ -65,13 +64,13 @@ def __init__(He, node_=None, md_t=None, n=0, H=None, root=None, i=None, i_=None,
# He.nest = 0 if nest is None else nest # nesting in H
def __bool__(H): return H.n != 0

def add_lay(HE, He):
def add_lay(HE, He, sign):

for Md_, md_ in zip(HE.md_t, He.md_t): # [mdExt, possibly mdLat, mdLay]
Md_ += md_
HE.Et+= He.Et; HE.n += He.n # combined n params
Md_ += md_ * sign
HE.Et+= He.Et * sign; HE.n += He.n * sign # combined n params

def add_H(HE, He_, root=None, ri=None): # unpack derHs down to numericals and sum them
def add_H(HE, He_, root=None, ri=None, sign=1): # unpack derHs down to numericals and sum them

if not isinstance(He_,list): He_ = [He_]
for He in He_:
Expand All @@ -82,7 +81,7 @@ def add_H(HE, He_, root=None, ri=None): # unpack derHs down to numericals and s
else:
if Lay is None: HE.append_(lay.copy_(root=HE)) # pack a copy of new lay in HE.H
else: HE.H[i] = lay.copy_(root=HE) # Lay was []
HE.add_lay(He)
HE.add_lay(He, sign)
HE.node_ += [node for node in He.node_ if node not in HE.node_] # node_ is empty in CL derH?
elif root:
if ri is None: root.derH = He.copy_(root=root)
@@ -159,20 +158,18 @@ def sort_H(He, fd): # re-assign rdn and form priority indices for comp_H, if se

class CG(CBase): # PP | graph | blob: params of single-fork node_ cluster

def __init__(G, rng=1, fd=0, n=0, Et=np.array([.0,.0,1.0]), root_=[], node_=[], link_=[], box=None, yx=None,
latuple=None, mdLay=None, derH=None, extH=None, subG_=None, altG=None, subL_=None, minL=None):
def __init__(G, n=0, fd=0, rng=1, root_=[], node_=[], link_=[], subG_=[], subL_=[],
Et=None, latuple=None, mdLay=None, derH=None, extH=None, altG=None, box=None, yx=None):
super().__init__()
G.n = n # last layer?
G.M = 0 # Rim val for centroid Gs
G.fd = 0 if fd else fd # 1 if cluster of Ls | lGs?
G.Et = Et # sum all param Ets
G.fd = fd # 1 if cluster of Ls | lGs?
G.rng = rng
G.root_ = [] if root_ is None else root_ # in cluster_N_, same nodes may be in multiple dist layers
G.node_ = [] if node_ is None else node_ # convert to GG_ or node_H in agg++
G.link_ = [] if link_ is None else link_ # internal links per comp layer in rng+, convert to LG_ in agg++
G.subG_ = [] if subG_ is None else subG_ # selectively clustered node_
G.subL_ = [] if subL_ is None else subL_ # selectively clustered link_
G.minL = 0 if minL is None else minL # min link.dist in subG

[Inline review thread on the removed minL parameter]

kwcckw (Nov 30, 2024): So minL is not needed now? I think last time you mentioned this information is useful too?

boris-kz, author (Nov 30, 2024, via email): [reply not shown in this view]

kwcckw (Nov 30, 2024): Okay, but it's removed from __init__ above? We may need to init it in Cgraph initialization?

boris-kz, author (Nov 30, 2024): right
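The thread converges on restoring minL with a default in the graph constructor. A minimal sketch of that follow-up; the class name and signature below are hypothetical, not code from this commit:

class CGSketch:  # hypothetical stand-in for CG, only to illustrate the fix discussed above
    def __init__(self, minL=None, **kwargs):
        self.minL = 0 if minL is None else minL  # min link.dist in subG, restored with a default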

G.root_ = root_ # in cluster_N_, same nodes may be in multiple dist layers
G.node_ = node_ # convert to GG_ or node_H in agg++
G.link_ = link_ # internal links per comp layer in rng+, convert to LG_ in agg++
G.subG_ = subG_ # selectively clustered node_
G.subL_ = subL_ # selectively clustered link_
G.Et = np.array([.0,.0,1.]) if Et is None else Et # sum all param Ets
G.latuple = np.array([.0,.0,.0,.0,.0,np.zeros(2)],dtype=object) if latuple is None else latuple # lateral I,G,M,Ma,L,[Dy,Dx]
G.mdLay = np.array([np.zeros(12), np.zeros(2), 0],dtype=object) if mdLay is None else mdLay # mdLat, et, n
# maps to node_H / agg+|sub+:
Expand Down Expand Up @@ -202,7 +199,6 @@ def __init__(l, nodet=None, dist=None, derH=None, angle=None, box=None, H_=None,
l.dist = 0 if dist is None else dist # distance between nodet centers
l.box = [] if box is None else box # sum nodet, not needed?
l.yx = [0,0] if yx is None else yx
l.Et = np.array([.0,.0,1.0]) # for rim-overlap modulated segmentation
l.H_ = [] if H_ is None else H_ # if agg++| sub++?
# add med, rimt, elay | extH in der+
def __bool__(l): return bool(l.derH.H)
@@ -226,8 +222,8 @@ def vectorize_root(frame):
md_,Et,n = N[3] if isinstance(N,list) else N.mdLay # N is CP
if any(md_) and Et[0] > ave: # convert PP|P to G:
root_,P_,link_,(md_,Et,n), lat, A, S, area, box, [y,x], n = N # PPt
Et = np.append(Et,1.0) # rdn
PP = CG(fd=0, root_=[root_], node_=P_,link_=link_,mdLay=np.array([md_,Et,n],dtype=object),latuple=lat, box=box,yx=[y,x],n=n)
PP = CG(fd=0, Et=np.append(Et,1.), root_=[root_], node_=P_, link_=link_, mdLay=np.array([md_,Et,n],dtype=object),
latuple=lat, box=box, yx=[y,x], n=n) # no altG until cross-comp
y0,x0,yn,xn = box
PP.aRad = np.hypot(*np.subtract(PP.yx,(yn,xn)))
edge.n += PP.n
@@ -236,6 +232,7 @@ def vectorize_root(frame):
if len(G_) > ave_L:
edge.subG_ = G_
intra_edge(edge); frame.subG_ += [edge]; frame.derH.add_H(edge.derH)
# add altG: summed converted adj_blobs of converted edge blob
# if len(edge.subG_) > ave_L: agg_recursion(edge) # unlikely
def intra_edge(edge):

@@ -400,7 +397,7 @@ def comp_N(_N,N, rn, angle=None, dist=None, dir=None): # dir if fd, Link.derH=d
_L, L = len(_N.node_),len(N.node_); dL = _L-L; mL = min(_L,L) - ave_L
mA,dA = comp_area(_N.box, N.box) # compare area in CG vs angle in CL
n = .3
M = mL + mA; D = (abs(dL)+abs(dA)) * (M/ave); M = M- D/2
M = mL + mA; D = (abs(dL)+abs(dA)) * (M/ave); M = M - D/2
Et = np.array([M,D], dtype=float)
md_t = [np.array([np.array([mL,dL,mA,dA], dtype=float), Et,n], dtype=object)] # init as [mdExt]
if not fd: # CG
Expand All @@ -410,21 +407,21 @@ def comp_N(_N,N, rn, angle=None, dist=None, dir=None): # dir if fd, Link.derH=d
# | n = (_n+n)/2?
Et = np.append(Et, (_N.Et[2]+N.Et[2])/2 ) # Et[0] += ave_rn - rn?
subLay = CH(n=n, md_t=md_t); subLay.Et=Et
elay = CH(H=[subLay], n=n, md_t=deepcopy(md_t)); elay.Et=copy(Et)
eLay = CH(H=[subLay], n=n, md_t=deepcopy(md_t)); eLay.Et=copy(Et)
if _N.derH and N.derH:
dderH = _N.derH.comp_H(N.derH, rn, dir=dir) # comp shared layers
elay.append_(dderH, flat=1)
elif _N.derH: elay.H += [_N.derH.copy_(root=elay)] # one empty derH
elif N.derH: elay.H += [N.derH.copy_(root=elay,rev=1)]
eLay.append_(dderH, flat=1)
elif _N.derH: eLay.append_(_N.derH.copy_(root=eLay)) # one empty derH
elif N.derH: eLay.append_(N.derH.copy_(root=eLay,rev=1))
# spec: comp_node_(node_|link_), combinatorial, node_ may be nested with rng-)agg+, graph similarity search?
Et = elay.Et
Et = copy(eLay.Et)
if not fd and _N.altG and N.altG: # not for CL, eval M?
altLink = comp_N(_N.altG, N.altG, _N.altG.n/N.altG.n) # no angle,dist, init alternating PPds | dPs?
elay.altH = altLink.derH
Et += elay.altH.Et
Link = CL(nodet=[_N,N],derH=elay, n=min(_N.n,N.n),yx=np.add(_N.yx,N.yx)/2, angle=angle,dist=dist,box=extend_box(N.box,_N.box))
eLay.altH = altLink.derH
Et += eLay.altH.Et
Link = CL(nodet=[_N,N],derH=eLay, n=min(_N.n,N.n),yx=np.add(_N.yx,N.yx)/2, angle=angle,dist=dist,box=extend_box(N.box,_N.box))
if Et[0] > ave * Et[2]:
elay.root = Link
eLay.root = Link
for rev, node in zip((0,1), (N,_N)): # reverse Link direction for _N
if fd: node.rimt[1-rev] += [(Link,rev)] # opposite to _N,N dir
else: node.rim += [(Link, rev)]
@@ -471,11 +468,11 @@ def sum2graph(root, grapht, fd, nest): # sum node and link params into graph, a
return graph

def sum_G_(node_):

G = CG()
for n in node_:
G.n += n.n; G.rng = n.rng; G.aRad += n.aRad; G.box = extend_box(G.box, n.box)
G.latuple += n.latuple; G.mdLay += n.mdLay; G.derH.add_H(n.derH); G.extH.add_H(n.extH)
G.n += n.n; G.rng = n.rng; G.aRad += n.aRad; G.box = extend_box(G.box, n.box); G.latuple += n.latuple; G.mdLay += n.mdLay
if n.derH: G.derH.add_H(n.derH)
if n.extH: G.extH.add_H(n.extH)
return G

if __name__ == "__main__":
