From 014301af4718ef072720979de2d3e53dbb2838e9 Mon Sep 17 00:00:00 2001 From: boris-kz Date: Mon, 17 Feb 2025 22:58:05 -0500 Subject: [PATCH] edits with Chee --- frame_2D_alg/agg_recursion.py | 57 ++++++++------- frame_2D_alg/comp_slice.py | 12 +++- frame_2D_alg/deprecated/25.2.py | 77 ++++++++++++++++++++ frame_2D_alg/frame_blobs.py | 121 +++++++++++--------------------- frame_2D_alg/slice_edge.py | 20 +++--- frame_2D_alg/vect_edge.py | 70 ++++++++++-------- 6 files changed, 210 insertions(+), 147 deletions(-) diff --git a/frame_2D_alg/agg_recursion.py b/frame_2D_alg/agg_recursion.py index 151107ded..4a9cf1a64 100644 --- a/frame_2D_alg/agg_recursion.py +++ b/frame_2D_alg/agg_recursion.py @@ -3,7 +3,7 @@ from functools import reduce from itertools import zip_longest from multiprocessing import Pool, Manager -from frame_blobs import frame_blobs_root, intra_blob_root, imread, aves, Caves +from frame_blobs import frame_blobs_root, intra_blob_root, imread, aves from vect_edge import L2N, base_comp, sum_G_, comb_H_, sum_H, add_H, comp_node_, comp_link_, sum2graph, get_rim, CG, CLay, vectorize_root, extend_box, Val_, val_ ''' notation: @@ -34,7 +34,7 @@ (which may include extending eval function with new match-projecting derivatives) Similar to cross-projection by data-coordinate filters, described in "imagination, planning, action" section of part 3 in Readme. ''' -ave, ave_L, icoef, max_dist = aves.m, aves.L, aves.icoef, aves.max_dist +ave, ave_L, icoef, max_dist = aves[-2], aves[6], aves[12], aves[9] def cross_comp(root, fn): # form agg_Level by breadth-first node_,link_ cross-comp, connect clustering, recursion @@ -76,6 +76,7 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists for i, L in enumerate(L_[1:], start=1): rel_dist = L.dist/_L.dist # >= 1 if rel_dist < 1.2 or Val_(et, _Et=Et) > 0 or len(L_[i:]) < ave_L: # ~=dist Ns or either side of L is weak + # * density: L.nodet (sum(_G.derTTe[0])- ave*(_G.Et[2]*_G.Et[3])) + (sum(G.derTTe[0])- ave*(G.Et[2]*G.Et[3]))) * ccoef / ave? _L = L; N_ += L.nodet; et += L.Et else: i -= 1; break # terminate contiguous-distance segment @@ -88,7 +89,7 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists eN_ = [] for eN in _eN_: # cluster rim-connected ext Ns, all in root Gt node_+=[eN]; eN.fin = 1 # all rim - for L,_ in get_rim(eN, fd): # all +ve, * density: if L.Et[0]/ave_d * sum([n.extH.m * ccoef / ave for n in L.nodet])? + for L,_ in get_rim(eN, fd): # all +ve if L not in link_: eN_ += [n for n in L.nodet if not n.fin] if L.dist < max_dist: @@ -100,11 +101,14 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists L_ = L_[i+1:] if L_: min_dist = max_dist # next loop connects current-dist clusters via longer links else: - nest,Q = (root.lnest, root.link_) if fd else (root.nnest, root.node_) - if nest: Q += [sum_G_(G_)] - else: Q[:] = [sum_G_(Q[:]),sum_G_(G_)] # init nesting if link_, node_ is already nested - if fd: root.lnest += 1 - else: root.nnest += 1 + [comb_altG_(G.altG) for G in G_] + if fd: + if root.lnest: root.link_ += [sum_G_(G_)] + else: root.link_ = [sum_G_(root.link_), sum_G_(G_)] # init nesting + root.lnest += 1 + else: + root.node_ += [sum_G_(G_)] # node_ is already nested + root.nnest += 1 break ''' Hierarchical clustering should alternate between two phases: generative via connectivity and compressive via centroid. 
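(review sketch) The segmentation loop in cluster_N_ above walks L_ sorted by L.dist and cuts a new cluster layer when the consecutive-distance ratio jumps; below is a minimal standalone version of just that cut rule, with a hypothetical Link stand-in for CL, and the Val_ / short-tail continuation conditions omitted:

from dataclasses import dataclass

@dataclass
class Link:  # stand-in for CL: only the distance attribute matters here
    dist: float

def segment_by_rel_dist(L_, max_ratio=1.2):
    # L_ is pre-sorted by .dist ascending, so each ratio is >= 1, as in cluster_N_
    if not L_: return []
    segments, seg = [], [L_[0]]
    for L in L_[1:]:
        if L.dist / seg[-1].dist < max_ratio:
            seg += [L]  # ~= same-distance neighborhood, keep extending the segment
        else:
            segments += [seg]; seg = [L]  # ratio jump: terminate contiguous-distance segment
    return segments + [seg]

# usage: layers = segment_by_rel_dist(sorted(link_, key=lambda L: L.dist))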
@@ -130,7 +134,8 @@ def sum_C(dnode_, C=None): # sum|subtract and average C-connected nodes C.node_ = [n for n in C.node_ if n.fin] # not in -ve dnode_, may add +ve later sum_G_(dnode_, sign, fc=1, G=C) # no extH, extend_box - sum_G_([n.altG for n in dnode_ if n.altG], sign, fc=0, falt=1, G=A) # no m, M, L in altGs + alt_ = [n.altG for n in dnode_ if n.altG] + if alt_: sum_G_(alt_, sign, fc=0, G=A) # no m, M, L in altGs k = len(dnode_) + 1-sign for falt, n in zip((0,1), (C, A)): # get averages n.Et/=k; n.derTT/=k; n.aRad/=k; n.yx /= k @@ -141,21 +146,17 @@ def sum_C(dnode_, C=None): # sum|subtract and average C-connected nodes return C def centroid_cluster(N, C_, root): # form and refine C cluster around N, in root node_|link_? - # proximity bias for both match and overlap? - # draft: + # init: N.fin = 1; CN_ = [N] for n in N_: if not hasattr(n,'fin') or n.fin or n is N: continue # in other C or in C.node_, or not in root radii = N.aRad + n.aRad dy, dx = np.subtract(N.yx, n.yx) dist = np.hypot(dy, dx) - # probably too complex: - en = len(N.extH) * N.Et[2:]; _en = len(n.extH) * n.Et[2:] # same n*o? - GV = val_(N.Et) + val_(n.Et) + (sum(N.derTTe[0])-ave*en) + (sum(n.derTTe[0])-ave*_en) - if dist > max_dist * ((radii * icoef**3) * GV): continue - n.fin = 1; CN_ += [n] - # same: - C = sum_C(CN_) # C.node_ + if dist < max_dist * ((radii * icoef**3) * (val_(N.Et)+val_(n.Et))): + n.fin = 1; CN_ += [n] + # refine: + C = sum_C(CN_) # C.node_, add proximity bias for both match and overlap? while True: dN_, M, dM = [], 0, 0 # pruned nodes and values, or comp all nodes again? for _N in C.node_: @@ -188,7 +189,7 @@ def centroid_cluster(N, C_, root): # form and refine C cluster around N, in roo for N in N_: if not N.fin: # not in prior C if Val_(N.Et, _Et=root.Et, coef=10) > 0: # cross-similar in G - centroid_cluster(N, C_, root) # form centroid around N, C_ +=[C] + centroid_cluster(N, C_, root) # form centroid cluster around N, C_ +=[C] else: break # the rest of N_ is lower-M if len(C_) > ave_L: @@ -244,7 +245,7 @@ def sort_H(H, fd): # re-assign olp and form priority indices for comp_tree, if if not fd: H.root.node_ = H.node_ -def centroid_M_(m_, M, ave): # adjust weights on attr matches, also add cost attrs +def centroid_M_(m_, M, ave): # adjust weights on attr matches, add cost attrs? _w_ = [1 for _ in m_] while True: w_ = [min(m/M, M/m) for m in m_] # rational deviations from mean, @@ -306,21 +307,25 @@ def agg_H_seq(focus,_nestt=(1,0)): # recursive level-forming pipeline, called f hG = Q[-1] # init bottom = 1 for lev_G in reversed(Q[:-1]): # top level gets no feedback - hm_ = hG.derTT[0] # + ave m-associated pars: len, dist, dcoords? + hm_ = hG.derTT[0] # + m-associated coefs: len, dist, dcoords? hm_ = centroid_M_(hm_, sum(hm_)/8, ave) dm_ = hm_ - lev_G.aves if sum(dm_) > ave: # update lev_G.aves = hm_ # proj agg+'m = m + dm? - # project focus by val_* dy,dx: frame derTT dgA / baseT gA? - # mean value shift within focus, bottom only, internal search per G hG = lev_G else: bottom = 0; break # feedback did not reach the bottom level dm_t += [dm_] bottom_t += [bottom] - if any(bottom_t) and sum(dm_t[0]) +sum(dm_t[1]) > ave: - # bottom level is refocused, new aves, rerun agg+: - agg_H_seq(focus,(frame.nnest,frame.lnest)) + if any(bottom_t) and sum(dm_t[0]) + sum(dm_t[1]) > ave: + # project focus by frame bottom-lev D_val: + if Val_(lev_G.Et, _Et=lev_G.Et, coef=20) > 0: # mean value shift within focus, bottom only, internal search per G + # include temporal Dm_+ Ddm_? 
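        # (review note) concretely, the feedback step above with toy numbers, ave=5:
        #   hm_ = hG.derTT[0] -> e.g. [4., 6., 5., 5., 3., 7., 5., 5.], mean M = 5
        #   centroid_M_ re-weights each attr by min(m/M, M/m), i.e. its closeness to the mean
        #   dm_ = hm_ - lev_G.aves; if sum(dm_) > ave, hm_ becomes the new lev_G.aves
        #   and feedback descends one more level, else bottom = 0 and the loop breaks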
+ dy,dx = np.int_(np.rint(lev_G.baseT[-2:])) # gA from summed Gs, rounded to integer pixel offsets
+ y,x,Y,X = lev_G.box # current focus?
+ proj_focus = image[y+dy : Y+dy, x+dx : X+dx] # 2-D window slice, shifted by gA
+ # refocus bottom level with new aves, rerun agg+
+ agg_H_seq(proj_focus, (frame.nnest,frame.lnest))
 return frame

diff --git a/frame_2D_alg/comp_slice.py b/frame_2D_alg/comp_slice.py
index 243e44f4b..f7b6dcec2 100644
--- a/frame_2D_alg/comp_slice.py
+++ b/frame_2D_alg/comp_slice.py
@@ -26,8 +26,14 @@
 Connectivity in P_ is traced through root_s of derts adjacent to P.dert_, possibly forking.
 len prior root_ sorted by G is root.olp, to eval for inclusion in PP or start new P by ave*olp
 '''
-ave, ave_d, ave_G, ave_PPm, ave_PPd, ave_L, ave_dI \
- = aves.B, aves.d, aves.G, aves.PPm, aves.PPd, aves.L, aves.dI
+ave = aves[-2]
+ave_d = aves[1]
+ave_G = aves[4]
+ave_PPm = aves[22]
+ave_PPd = aves[23]
+ave_L = aves[6]
+ave_dI = aves[14]
+ave_md = [ave,ave_d]

 class CdP(CBase): # produced by comp_P, comp_slice version of Clink
 name = "dP"
@@ -125,7 +131,7 @@ def form_PP_(root, iP_, fd): # form PPs of dP.valt[fd] + connected Ps val
 while _prim_:
 prim_,lrim_ = set(),set()
 for _P,_link in zip(_prim_,_lrim_):
- if _link.Et[fd] < aves.md[fd] or _P.merged:
+ if _link.Et[fd] < ave_md[fd] or _P.merged:
 continue
 _P_.add(_P); link_.add(_link)
 _I,_G,_M,_D,_L,_ = _P.latuple
diff --git a/frame_2D_alg/deprecated/25.2.py b/frame_2D_alg/deprecated/25.2.py
index ea86f0a08..c6ef9cfbf 100644
--- a/frame_2D_alg/deprecated/25.2.py
+++ b/frame_2D_alg/deprecated/25.2.py
@@ -266,3 +266,80 @@ def agg_H_seq(focus): # sequential level-updating pipeline
 else: break
 frame.node_ = agg_H
 return frame
+
+class Caves(object): # hyper-parameters, init a guess, adjusted by feedback
+ name = "Filters"
+ def __init__(ave):
+ ave.m = 5
+ ave.d = 10 # ave change to Ave_min from the root intra_blob?
+ ave.L = 4
+ ave.rn = 1000 # max scope disparity
+ ave.max_dist = 2
+ ave.coef = 10
+ ave.ccoef = 10 # scaling match ave to clustering ave
+ ave.icoef = .15 # internal M proj_val / external M proj_val
+ ave.med_cost = 10
+ # comp_slice
+ ave.cs = 5 # ave of comp_slice
+ ave.dI = 20 # ave inverse m, change to Ave from the root intra_blob?
+ ave.inv = 20
+ ave.mG = 10
+ ave.mM = 2
+ ave.mD = 2
+ ave.mMa = .1
+ ave.mA = .2
+ ave.mL = 2
+ ave.PPm = 50
+ ave.PPd = 50
+ ave.Pm = 10
+ ave.Pd = 10
+ ave.Gm = 50
+ ave.Lslice = 5
+ # slice_edge
+ ave.I = 100
+ ave.G = 100
+ ave.g = 30 # change to Ave from the root intra_blob?
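+ # (review note) this Caves listing is archived from frame_blobs.py by this patch;
+ # live code now reads the flat `aves` array defined there by position, so the
+ # named attributes in this deprecated copy only document that array's layout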
+ ave.mL = 2 + ave.dist = 3 + ave.dangle = .95 # vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2, + ave.olp = 5 + ave.B = 30 + ave.R = 10 + ave.coefs = { "m": 1, + # vectorize_edge + "d": 1, + "L": 1, + "rn": 1, + "max_dist": 1, + "coef": 1, + "ccoef": 1, + "icoef": 1, + "med_cost": 1, + # comp_slice + "dI": 1, + "inv": 1, + "ave_cs_d": 1, + "mG": 1, + "mM": 1, + "mD": 1, + "mMa": 1, + "mA": 1, + "mL": 1, + "PPm": 1, + "PPd": 1, + "Pm": 1, + "Pd": 1, + "Gm": 1, + "Lslice": 1, + # slice_edge + "I": 1, + "G": 1, + "g": 1, + "dist": 1, + "dangle": 1, + "olp": 1, + "B": 1, + "R": 1 + } + def sum_aves(ave): + return sum(value for value in vars(ave).values()) diff --git a/frame_2D_alg/frame_blobs.py b/frame_2D_alg/frame_blobs.py index 910a45857..de984a5d9 100644 --- a/frame_2D_alg/frame_blobs.py +++ b/frame_2D_alg/frame_blobs.py @@ -55,82 +55,6 @@ def get_instance(cls, _id): return inst def __repr__(obj): return f"{obj.__class__.__name__}(id={obj.id})" -class Caves(object): # hyper-parameters, init a guess, adjusted by feedback - name = "Filters" - def __init__(ave): - ave.m = 5 - ave.d = 10 # ave change to Ave_min from the root intra_blob? - ave.L = 4 - ave.rn = 1000 # max scope disparity - ave.max_dist = 2 - ave.coef = 10 - ave.ccoef = 10 # scaling match ave to clustering ave - ave.icoef = .15 # internal M proj_val / external M proj_val - ave.med_cost = 10 - # comp_slice - ave.cs = 5 # ave of comp_slice - ave.dI = 20 # ave inverse m, change to Ave from the root intra_blob? - ave.inv = 20 - ave.mG = 10 - ave.mM = 2 - ave.mD = 2 - ave.mMa = .1 - ave.mA = .2 - ave.mL = 2 - ave.PPm = 50 - ave.PPd = 50 - ave.Pm = 10 - ave.Pd = 10 - ave.Gm = 50 - ave.Lslice = 5 - # slice_edge - ave.I = 100 - ave.G = 100 - ave.g = 30 # change to Ave from the root intra_blob? - ave.mL = 2 - ave.dist = 3 - ave.dangle = .95 # vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2, - ave.olp = 5 - ave.B = 30 - ave.R = 10 - ave.coefs = { "m": 1, - # vectorize_edge - "d": 1, - "L": 1, - "rn": 1, - "max_dist": 1, - "coef": 1, - "ccoef": 1, - "icoef": 1, - "med_cost": 1, - # comp_slice - "dI": 1, - "inv": 1, - "ave_cs_d": 1, - "mG": 1, - "mM": 1, - "mD": 1, - "mMa": 1, - "mA": 1, - "mL": 1, - "PPm": 1, - "PPd": 1, - "Pm": 1, - "Pd": 1, - "Gm": 1, - "Lslice": 1, - # slice_edge - "I": 1, - "G": 1, - "g": 1, - "dist": 1, - "dangle": 1, - "olp": 1, - "B": 1, - "R": 1 - } - def sum_aves(ave): - return sum(value for value in vars(ave).values()) def __getattribute__(ave,name): coefs = object.__getattribute__(ave, "coefs") @@ -141,9 +65,48 @@ def __getattribute__(ave,name): else: return object.__getattribute__(ave, name) * coefs[name] # always return ave * coef -aves = Caves() -ave = aves.B # base filter, directly used for comp_r fork -aveR = aves.R # for range+, fixed overhead per blob + # hyper-parameters, init a guess, adjusted by feedback +aves = np.array([ + 5, # ave.m + 10, # ave.d = ave change to Ave_min from the root intra_blob? + 2, # ave.n + 100, # ave.I + 100, # ave.G + 5, # ave.Ga + 1, # ave.L + 5, # ave.LA + 1000, # ave.rn = max scope disparity + 2, # ave.max_dist + 10, # ave.coef + 10, # ave.ccoef = scaling match ave to clustering ave + .15, # ave.icoef = internal M proj_val / external M proj_val + 10, # ave.med_cost + # comp_slice + 20, # ave.dI = ave inverse m, change to Ave from the root intra_blob? 
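+ # (review note) index map of the entries so far: 0 m, 1 d, 2 n, 3 I, 4 G, 5 Ga,
+ # 6 L, 7 LA, 8 rn, 9 max_dist, 10 coef, 11 ccoef, 12 icoef, 13 med_cost, 14 dI;
+ # comp_slice and slice_edge entries continue below through 34 R, hence the
+ # negative indexing elsewhere: aves[-2] = B, aves[-1] = R, aves[-4] = dangle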
+ 20, # ave.inv
+ 10, # ave.mG
+ 2, # ave.mM
+ 2, # ave.mD
+ .1, # ave.mMa
+ .2, # ave.mA
+ 2, # ave.mL
+ 50, # ave.PPm
+ 50, # ave.PPd
+ 10, # ave.Pm
+ 10, # ave.Pd
+ 50, # ave.Gm
+ 5, # ave.Lslice
+ # slice_edge
+ 30, # ave.g = change to Ave from the root intra_blob?
+ 2, # ave.mL
+ 3, # ave.dist
+ .95, # ave.dangle = vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2,
+ 5, # ave.olp
+ 30, # ave.B
+ 10 # ave.R
+ ])
+ave = aves[-2] # base filter, directly used for comp_r fork
+aveR = aves[-1] # for range+, fixed overhead per blob

 class CFrame(CBase):

diff --git a/frame_2D_alg/slice_edge.py b/frame_2D_alg/slice_edge.py
index fced24bce..c9cadd5a8 100644
--- a/frame_2D_alg/slice_edge.py
+++ b/frame_2D_alg/slice_edge.py
@@ -2,7 +2,7 @@
 from collections import defaultdict
 from itertools import combinations
 from math import atan2, cos, floor, pi
-from frame_blobs import frame_blobs_root, intra_blob_root, CBase, imread, unpack_blob_
+from frame_blobs import frame_blobs_root, intra_blob_root, CBase, imread, unpack_blob_, aves
 '''
 In natural images, objects look very fuzzy and frequently interrupted, only vaguely suggested by initial blobs and contours.
 Potential object is proximate low-gradient (flat) blobs, with rough / thick boundary of adjacent high-gradient (edge) blobs.
@@ -16,6 +16,9 @@
 and inverse gradient deviation of flat blobs. But the latter is implicit here: high-gradient areas are usually quite sparse.
 A stable combination of a core flat blob with adjacent edge blobs is a potential object.
 '''
+ave_I = aves[3]
+ave_G = aves[4]
+ave_dangle = aves[-4]

 class CP(CBase):
 def __init__(P, yx, axis):
@@ -32,7 +35,7 @@ def vectorize_root(frame):
- if not blob.sign and blob.G > frame.ave.G: slice_edge(blob, frame.ave)
+ if not blob.sign and blob.G > ave_G: slice_edge(blob) # match new slice_edge signature, frame.ave is replaced by module-level aves

-def slice_edge(edge, aves):
+def slice_edge(edge):
 axisd = select_max(edge)
 yx_ = sorted(axisd.keys(), key=lambda yx: edge.dert_[yx][-1]) # sort by g
@@ -40,8 +43,8 @@
 # form P/ local max yx:
 while yx_:
 yx = yx_.pop(); axis = axisd[yx] # get max of g maxes
- P = form_P(CP(yx, axis), edge, ave_I = aves.I, ave_G = aves.G, ave_dangle = aves.dangle)
- edge.P_ += [P]
+ P = form_P(CP(yx, axis), edge)
+ if P: edge.P_ += [P]
 yx_ = [yx for yx in yx_ if yx not in edge.rootd] # remove merged maxes if any
 edge.P_.sort(key=lambda P: P.yx, reverse=True)
 trace_P_adjacency(edge)
@@ -63,11 +66,10 @@ def select_max(edge):
 if new_max: axisd[y, x] = sa, ca
 return axisd

-def form_P(P, edge, ave_I,ave_G,ave_dangle):
- y, x = P.yx
+def form_P(P, edge):
+ y, x = iy, ix = P.yx # keep initial center (iy,ix) as the edge.rootd key
 ay, ax = P.axis
 center_dert = i,gy,gx,g = edge.dert_[y,x] # dert is None if (_y,_x) not in edge.dert_: `return` in interpolate2dert
- edge.rootd[y,x] = P
 I,Dy,Dx,G, M,D,L = i,gy,gx,g, 0,0,1
 P.yx_ = [P.yx]
 P.dert_ += [center_dert]
@@ -99,7 +101,9 @@ def form_P(P, edge):
 P.yx = tuple(np.mean([P.yx_[0], P.yx_[-1]], axis=0)) # new center
 P.latuple = new_latuple(I,G, M,D, L, [Dy, Dx])
- return P
+ if len(P.dert_)>1: # skip single dert's P
+ edge.rootd[iy,ix] = P
+ return P

 def trace_P_adjacency(edge): # fill and trace across slices

diff --git a/frame_2D_alg/vect_edge.py b/frame_2D_alg/vect_edge.py
index 1c028cc5c..d1b381c9e 100644
--- a/frame_2D_alg/vect_edge.py
+++ b/frame_2D_alg/vect_edge.py
@@ -1,4 +1,4 @@
-from frame_blobs import CBase, frame_blobs_root, intra_blob_root, imread, unpack_blob_, aves, Caves
+from frame_blobs import CBase, frame_blobs_root, intra_blob_root, imread, unpack_blob_, aves
 from slice_edge import 
slice_edge, comp_angle from comp_slice import comp_slice from itertools import combinations, zip_longest @@ -37,8 +37,15 @@ postfix _ denotes array of same-name elements, multiple _s is nested array capitalized variables are usually summed small-case variables ''' -ave, ave_d, ave_L, ave_G, max_dist, ave_rn, ccoef, icoef, med_cost, ave_dI = \ -aves.B, aves.d, aves.L, aves.G, aves.max_dist, aves.rn, aves.ccoef, aves.icoef, aves.med_cost, aves.dI +ave = aves[-2] +ave_d = aves[1] +ave_L = aves[6] +ave_G = aves[4] +max_dist = aves[9] +ave_rn = aves[8] +icoef = aves[12] +med_cost = aves[13] +ave_dI = aves[14] class CLay(CBase): # flat layer if derivation hierarchy name = "lay" @@ -48,7 +55,7 @@ def __init__(l, **kwargs): l.root = kwargs.get('root', None) # higher node or link l.node_ = kwargs.get('node_', []) # concat across fork tree l.link_ = kwargs.get('link_', []) - l.derTT = kwargs.get('derTT', np.zeros((2,8))) # [[mEt,mBase,mExt],[dEt,dBase,dExt]], sum across fork tree + l.derTT = kwargs.get('derTT', np.zeros((2,8))) # [[mBase,mEt,mExt],[dBase,dEt,dExt]], sum across fork tree # altL = CLay from comp altG # i = kwargs.get('i', 0) # lay index in root.node_, link_, to revise olp # i_ = kwargs.get('i_',[]) # priority indices to compare node H by m | link H by d @@ -103,13 +110,14 @@ class CG(CBase): # PP | graph | blob: params of single-fork node_ cluster # graph / node def __init__(G, **kwargs): super().__init__() - G.fd_ = kwargs.get('fd_',[]) # list of forks forming G, 1 if cluster of Ls | lGs, for feedback only? + G.fd = kwargs.get('fd',[]) # or fd_: list of forks forming G, 1 if cluster of Ls | lGs, for feedback only? G.root = kwargs.get('root') # may extend to list in cluster_N_, same nodes may be in multiple dist layers G.Et = kwargs.get('Et', np.zeros(4)) # sum all params M,D,n,o G.yx = kwargs.get('yx', np.zeros(2)) # init PP.yx = [(y+Y)/2,(x,X)/2], then ave node yx G.box = kwargs.get('box', np.array([np.inf,-np.inf,np.inf,-np.inf])) # y,Y,x,X, area: (Y-y)*(X-x), G.baseT = kwargs.get('baseT',[]) # I,G,Dy,Dx G.derTT = kwargs.get('derTT',np.zeros((2,8))) # m,d / baseT,Et,box, summed across derH lay forks + G.derTTe = kwargs.get('derTTe',np.zeros((2,8))) # sum across link.derHs G.derH = kwargs.get('derH',[]) # each lay is [m,d]: Clay(Et,node_,link_,derTT), sum|concat links across fork tree G.extH = kwargs.get('extH',[]) # sum from rims, single-fork G.maxL = kwargs.get('maxL', 0) # if dist-nested in cluster_N_ @@ -229,8 +237,7 @@ def cluster_PP_(N_, fd): def comp_node_(_N_, L=0): # rng+ forms layer of rim and extH per N, appends N_,L_,Et, ~ graph CNN without backprop _Gp_ = [] # [G pair + co-positionals], for top-nested Ns, unless cross-nesting comp: - if L: _N_ = filter(lambda N: len(N.derH)==L, _N_) - # form derTTe per N.extH, for eval or comp? + if L: _N_ = filter(lambda N: len(N.derH)==L, _N_) # if dist-nested for _G, G in combinations(_N_, r=2): # if max len derH in agg+ _n, n = _G.Et[2], G.Et[2]; rn = _n/n if _n>n else n/_n if rn > ave_rn: # scope disparity or _G.depth != G.depth, not needed? @@ -252,9 +259,7 @@ def comp_node_(_N_, L=0): # rng+ forms layer of rim and extH per N, appends N_, if _nrim & nrim: # indirectly connected Gs, continue # no direct match priority? # dist vs. radii * induction, mainly / extH? - en = len(G.extH) * G.Et[2:]; _en = len(_G.extH) * _G.Et[2:] # same n*o? 
- GV = val_(_G.Et) + val_(G.Et) + (sum(_G.derTTe[0])-ave*en) + (sum(G.derTTe[0])-ave*_en) - if dist < max_dist * ((radii * icoef**3) * GV): + if dist < max_dist * ((radii * icoef**3) * (val_(_G.Et)+val_(G.Et))): Link = comp_N(_G,G, angle=[dy,dx], dist=dist) L_ += [Link] # include -ve links if val_(Link.Et) > 0: @@ -334,14 +339,14 @@ def base_comp(_N, N, dir=1, fd=0): # comp Et, Box, baseT, derTT rn = _n/n; mn = (ave_rn-rn) / max(rn, ave_rn) # ? * priority coef? nM = M*rn; dM = _M - nM; mM = min(_M,nM) / max(_M,nM) nD = D*rn; dD = _D - nD; mD = min(_D,nD) / max(_D,nD) - - if N.baseT: # empty in CL + # comp baseT: + if N.fd: # empty in CL + mI,mG,mgA, dI,dG,dgA = .0,.0,.0,.0,.0,.0 + else: _I,_G,_Dy,_Dx = _N.baseT; I,G,Dy,Dx = N.baseT # I,G,Angle I*=rn; dI = _I - I; mI = abs(dI) / ave_dI G*=rn; dG = _G - G; mG = min(_G,G) / max(_G,G) mgA, dgA = comp_angle((_Dy,_Dx),(Dy*rn,Dx*rn)) - else: - mI,mG,mgA, dI,dG,dgA = .0,.0,.0,.0,.0,.0 # comp ext: _y,_x,_Y,_X = _N.box; y,x,Y,X = np.array(N.box) * rn _dy,_dx, dy, dx = _Y-_y, _X-_x, Y-y, X-x @@ -368,13 +373,13 @@ def comp_N(_N,N, angle=None, dist=None, dir=1): # compare links, relative N dir Et = np.array([M,D, 8, (_N.Et[3]+ N.Et[3]) /2]) # n comp vars, inherited olp derTT = np.array([m_,d_]) Link = CL(fd=fd,nodet=[_N,N], derTT=derTT, yx=np.add(_N.yx,N.yx)/2, angle=angle, dist=dist, box=extend_box(N.box,_N.box)) - + # spec / lay: if M > ave and (len(N.derH) > 2 or isinstance(N,CL)): # else derH is redundant to dext,vert dderH = comp_H(_N.derH, N.derH, rn, Link, Et, fd) # comp shared layers, if any - # spec: comp_node_(node_|link_) + # spec / comp_node_(node_|link_)? Link.derH = [CLay(root=Link,Et=Et,node_=[_N,N],link_=[Link], derTT=copy(derTT)), *dderH] for lay in dderH: derTT += lay.derTT - # spec: + # spec / alt: if not fd and _N.altG and N.altG: # if alt M? Link.altL = comp_N(_N.altG, N.altG, _N.altG.Et[2] / N.altG.Et[2]) Et += Link.altL.Et @@ -392,10 +397,10 @@ def get_rim(N,fd): return N.rimt[0] + N.rimt[1] if fd else N.rim # add nesting def sum2graph(root, grapht, fd, minL=0, maxL=None): # sum node and link params into graph, aggH in agg+ or player in sub+ node_, link_, Et = grapht - graph = CG(fd_=node_[0].fd_+[fd], Et=Et*icoef, root=root, node_=[],link_=link_, maxL=maxL, nnest=root.nnest, lnest=root.lnest) + graph = CG(fd=fd, Et=Et*icoef, root=root, node_=[],link_=link_, maxL=maxL, nnest=root.nnest, lnest=root.lnest, baseT=copy(node_[0].baseT)) # arg Et is weaker if internal, maxL,minL: max and min L.dist in graph.link_ N_, yx_ = [],[] - for N in node_: + for i, N in enumerate(node_): fc = 0 if minL: # > 0, inclusive, = lower-layer exclusive maxL, if G was distance-nested in cluster_N_ while N.root.maxL and N.root is not graph and (minL != N.root.maxL): # maxL=0 in edge|frame @@ -407,9 +412,7 @@ def sum2graph(root, grapht, fd, minL=0, maxL=None): # sum node and link params yx_ += [N.yx] graph.box = extend_box(graph.box, N.box) # pre-compute graph.area += N.area? graph.Et += N.Et * icoef ** 2 # deeper, lower weight - if not fd: # skip CL - if graph.baseT: graph.baseT += N.baseT - else: graph.baseT = copy(N.baseT) + if i and not fd: graph.baseT += N.baseT # skip CL N.root = graph graph.node_= N_ # nodes or roots, link_ is still current-dist links only? 
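 # (review note) derTT vs derTTe split introduced by this patch: add_H with fd=1
 # (one-fork link lays) accumulates lay.derTT into root.derTTe, while two-fork
 # node lays accumulate into root.derTT; assuming sum_H delegates per-lay sums to
 # add_H, the sum_H(link_, graph, fd=1) call just below also feeds graph.derTTe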
 graph.derH = [[CLay(root=graph), lay] for lay in sum_H(link_, graph, fd=1)] # sum and nest link derH
@@ -453,6 +456,7 @@ def add_H(H, h, root, rev=0, fc=0, fd=0): # add fork L.derHs
 if fd: # one-fork lays
 if Lay: Lay.add_lay(lay,rev=rev,fc=fc)
 else: H += [lay.copy_(root=root,rev=rev,fc=fc)]
+ root.derTTe += lay.derTT
 root.Et += lay.Et
 else: # two-fork lays
 if Lay:
@@ -464,7 +468,9 @@ def add_H(H, h, root, rev=0, fc=0, fd=0): # add fork L.derHs
 else:
 Lay = []
 for fork in lay:
- Lay += [fork.copy_(root=root,rev=rev,fc=fc)]; root.Et += fork.Et
+ Lay += [fork.copy_(root=root,rev=rev,fc=fc)]
+ root.derTT += fork.derTT
+ root.Et += fork.Et
 H += [Lay]

 def comp_H(H,h, rn, root, Et, fd): # one-fork derH if fd, else two-fork derH
@@ -481,37 +487,38 @@ def comp_H(H,h, rn, root, Et, fd): # one-fork derH if fd, else two-fork derH
 dlay = _fork.comp_lay(fork, rn,root=root)
 if dLay: dLay.add_lay(dlay) # sum ds between input forks
 else: dLay = dlay
- Et += dLay.Et
+ # assuming prior base_comp, only deviations should be summed in Et:
+ Et[:2] += lay.Et[:2] / lay.Et[2] - Et[:2] / lay.Et[2]
 derH += [dLay]
 return derH

 def sum_G_(node_, s=1, fc=0, G=None):
- fn = node_[0].baseT
 if G is None:
- G = CG(); G.aves = Caves()
+ G = copy_(node_[0]); node_ = node_[1:] # sum the rest of node_ into a copy of node_[0]
 for n in node_:
- if fn: G.baseT += n.baseT * s
+ if not G.fd:
+ G.baseT += n.baseT * s; G.derTTe += n.derTTe
 G.derTT += n.derTT * s; G.Et += n.Et * s; G.aRad += n.aRad * s; G.yx += n.yx * s
 if n.derH:
- add_H(G.derH, n.derH, root=G, rev = s==-1, fc=fc, fd=not fn) # alt is single layer
+ add_H(G.derH, n.derH, root=G, rev = s==-1, fc=fc, fd=G.fd) # alt is single layer
 if fc: G.M += n.m * s; G.L += s
 else:
- if n.extH: add_H(G.extH, n.extH, root=G, rev = s==-1, fd=not fn) # empty in centroid
+ if n.extH: add_H(G.extH, n.extH, root=G, rev = s==-1, fd=G.fd) # empty in centroid
 G.box = extend_box( G.box, n.box) # extended per separate node_ in centroid
 return G

 def L2N(link_,root):
 for L in link_:
- L.root = root; L.fd_=copy(L.nodet[0].fd_); L.mL_t,L.rimt = [[],[]],[[],[]]; L.aRad=0; L.visited_,L.extH = [],[]; L.baseT = []
+ L.root=root; L.fd_=copy(L.nodet[0].fd_); L.mL_t,L.rimt=[[],[]],[[],[]]; L.aRad=0; L.visited_,L.extH=[],[]; L.baseT=[]; L.derTTe=np.zeros((2,8))

 def frame2G(G, **kwargs):
 blob2G(G, **kwargs)
 G.derH = kwargs.get('derH', [CLay(root=G, Et=np.zeros(4), derTT=[], node_=[],link_ =[])])
 G.Et = kwargs.get('Et', np.zeros(4))
 G.node_ = kwargs.get('node_', [])
- G.aves = Caves() # per frame's aves
+ G.aves = aves

 def blob2G(G, **kwargs):
 # node_, Et stays the same:
@@ -524,6 +531,7 @@ def blob2G(G, **kwargs):
 G.extH = [] # sum from rims
 G.baseT = [] # I,G,Dy,Dx
 G.derTT = kwargs.get('derTT', np.zeros((2,8))) # m_,d_ base params
+ G.derTTe = kwargs.get('derTTe', np.zeros((2,8)))
 G.box = kwargs.get('box', np.array([np.inf,np.inf,-np.inf,-np.inf])) # y0,x0,yn,xn
 G.yx = kwargs.get('yx', np.zeros(2)) # init PP.yx = (y+Y)/2,(x+X)/2, then ave node yx
 G.rim = [] # flat links of any rng, may be nested in clustering
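(review sketch) The refocusing step in agg_H_seq shifts the focus window by the frame-level gradient angle; below is a self-contained version with bounds clipping, assuming `image` is the full 2-D input and the box unpacks as y,x,Y,X per the agg_H_seq line above (CG's own comment says y,Y,x,X, which is worth reconciling):

import numpy as np

def project_focus(image, box, baseT):
    # shift the current focus box by summed-gradient direction (Dy,Dx), clipped to image bounds
    H, W = image.shape[:2]
    y, x, Y, X = box                       # top, left, bottom, right of current focus
    dy, dx = np.int_(np.rint(baseT[-2:]))  # Dy,Dx from baseT = [I,G,Dy,Dx], as pixel offsets
    y, Y = np.clip([y + dy, Y + dy], 0, H)
    x, X = np.clip([x + dx, X + dx], 0, W)
    return image[y:Y, x:X]                 # shifted window, empty if fully out of bounds

# usage sketch: proj_focus = project_focus(image, lev_G.box, lev_G.baseT)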