Commit 014301a

edits with Chee

boris-kz committed Feb 18, 2025
1 parent 9703bad
Showing 6 changed files with 210 additions and 147 deletions.
57 changes: 31 additions & 26 deletions frame_2D_alg/agg_recursion.py
@@ -3,7 +3,7 @@
from functools import reduce
from itertools import zip_longest
from multiprocessing import Pool, Manager
from frame_blobs import frame_blobs_root, intra_blob_root, imread, aves, Caves
from frame_blobs import frame_blobs_root, intra_blob_root, imread, aves
from vect_edge import L2N, base_comp, sum_G_, comb_H_, sum_H, add_H, comp_node_, comp_link_, sum2graph, get_rim, CG, CLay, vectorize_root, extend_box, Val_, val_
'''
notation:
@@ -34,7 +34,7 @@
(which may include extending eval function with new match-projecting derivatives)
Similar to cross-projection by data-coordinate filters, described in "imagination, planning, action" section of part 3 in Readme.
'''
ave, ave_L, icoef, max_dist = aves.m, aves.L, aves.icoef, aves.max_dist
ave, ave_L, icoef, max_dist = aves[-2], aves[6], aves[12], aves[9]

def cross_comp(root, fn): # form agg_Level by breadth-first node_,link_ cross-comp, connect clustering, recursion

@@ -76,6 +76,7 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists
for i, L in enumerate(L_[1:], start=1):
rel_dist = L.dist/_L.dist # >= 1
if rel_dist < 1.2 or Val_(et, _Et=Et) > 0 or len(L_[i:]) < ave_L: # ~=dist Ns or either side of L is weak
# * density: L.nodet (sum(_G.derTTe[0])- ave*(_G.Et[2]*_G.Et[3])) + (sum(G.derTTe[0])- ave*(G.Et[2]*G.Et[3]))) * ccoef / ave?
_L = L; N_ += L.nodet; et += L.Et
else:
i -= 1; break # terminate contiguous-distance segment
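A minimal standalone sketch of the segmentation rule above, assuming links arrive sorted by dist (names are illustrative; the real loop also accumulates Et and applies the Val_ and ave_L checks):

def segment_by_dist(links, max_ratio=1.2):
    # split a dist-sorted link list into contiguous-distance segments,
    # cutting wherever dist jumps by >= max_ratio over the previous link
    segments, seg = [], [links[0]]
    for link in links[1:]:
        if link.dist / seg[-1].dist < max_ratio:
            seg.append(link)      # ~same distance band, keep extending
        else:
            segments.append(seg)  # distance jump: terminate segment
            seg = [link]
    segments.append(seg)
    return segments

For dists [1, 1.1, 1.2, 2.5, 2.6] this yields segments [1, 1.1, 1.2] and [2.5, 2.6].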
@@ -88,7 +89,7 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists
eN_ = []
for eN in _eN_: # cluster rim-connected ext Ns, all in root Gt
node_+=[eN]; eN.fin = 1 # all rim
for L,_ in get_rim(eN, fd): # all +ve, * density: if L.Et[0]/ave_d * sum([n.extH.m * ccoef / ave for n in L.nodet])?
for L,_ in get_rim(eN, fd): # all +ve
if L not in link_:
eN_ += [n for n in L.nodet if not n.fin]
if L.dist < max_dist:
@@ -100,11 +101,14 @@ def cluster_N_(root, L_, fd): # top-down segment L_ by >ave ratio of L.dists
L_ = L_[i+1:]
if L_: min_dist = max_dist # next loop connects current-dist clusters via longer links
else:
nest,Q = (root.lnest, root.link_) if fd else (root.nnest, root.node_)
if nest: Q += [sum_G_(G_)]
else: Q[:] = [sum_G_(Q[:]),sum_G_(G_)] # init nesting if link_, node_ is already nested
if fd: root.lnest += 1
else: root.nnest += 1
[comb_altG_(G.altG) for G in G_]
if fd:
if root.lnest: root.link_ += [sum_G_(G_)]
else: root.link_ = [sum_G_(root.link_), sum_G_(G_)] # init nesting
root.lnest += 1
else:
root.node_ += [sum_G_(G_)] # node_ is already nested
root.nnest += 1
break
'''
Hierarchical clustering should alternate between two phases: generative via connectivity and compressive via centroid.
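A hedged sketch of that alternation on plain 2D points, with all names illustrative rather than the module's API: the generative phase floods through short links, the compressive phase averages each cluster and re-selects members by distance to the mean (points is an (N, 2) numpy array).

import numpy as np

def connectivity_clusters(points, link_dist):
    # generative phase: flood fill through links shorter than link_dist
    unassigned = set(range(len(points)))
    clusters = []
    while unassigned:
        seed = unassigned.pop(); cluster = [seed]; frontier = [seed]
        while frontier:
            i = frontier.pop()
            near = [j for j in unassigned
                    if np.hypot(*(points[i] - points[j])) < link_dist]
            for j in near:
                unassigned.remove(j); cluster.append(j); frontier.append(j)
        clusters.append(cluster)
    return clusters

def centroid_refine(points, cluster, keep_dist):
    # compressive phase: average members, prune by distance to the centroid,
    # repeat until membership stops changing
    while True:
        centroid = points[cluster].mean(axis=0)
        kept = [i for i in cluster
                if np.hypot(*(points[i] - centroid)) < keep_dist]
        if not kept or len(kept) == len(cluster):
            return centroid, cluster
        cluster = kept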
@@ -130,7 +134,8 @@ def sum_C(dnode_, C=None): # sum|subtract and average C-connected nodes
C.node_ = [n for n in C.node_ if n.fin] # not in -ve dnode_, may add +ve later

sum_G_(dnode_, sign, fc=1, G=C) # no extH, extend_box
sum_G_([n.altG for n in dnode_ if n.altG], sign, fc=0, falt=1, G=A) # no m, M, L in altGs
alt_ = [n.altG for n in dnode_ if n.altG]
if alt_: sum_G_(alt_, sign, fc=0, G=A) # no m, M, L in altGs
k = len(dnode_) + 1-sign
for falt, n in zip((0,1), (C, A)): # get averages
n.Et/=k; n.derTT/=k; n.aRad/=k; n.yx /= k
@@ -141,21 +146,17 @@ def sum_C(dnode_, C=None): # sum|subtract and average C-connected nodes
return C

def centroid_cluster(N, C_, root): # form and refine C cluster around N, in root node_|link_?
# proximity bias for both match and overlap?
# draft:
# init:
N.fin = 1; CN_ = [N]
for n in N_:
if not hasattr(n,'fin') or n.fin or n is N: continue # in other C or in C.node_, or not in root
radii = N.aRad + n.aRad
dy, dx = np.subtract(N.yx, n.yx)
dist = np.hypot(dy, dx)
# probably too complex:
en = len(N.extH) * N.Et[2:]; _en = len(n.extH) * n.Et[2:] # same n*o?
GV = val_(N.Et) + val_(n.Et) + (sum(N.derTTe[0])-ave*en) + (sum(n.derTTe[0])-ave*_en)
if dist > max_dist * ((radii * icoef**3) * GV): continue
n.fin = 1; CN_ += [n]
# same:
C = sum_C(CN_) # C.node_
if dist < max_dist * ((radii * icoef**3) * (val_(N.Et)+val_(n.Et))):
n.fin = 1; CN_ += [n]
# refine:
C = sum_C(CN_) # C.node_, add proximity bias for both match and overlap?
while True:
dN_, M, dM = [], 0, 0 # pruned nodes and values, or comp all nodes again?
for _N in C.node_:
@@ -188,7 +189,7 @@ def centroid_cluster(N, C_, root): # form and refine C cluster around N, in root node_|link_?
for N in N_:
if not N.fin: # not in prior C
if Val_(N.Et, _Et=root.Et, coef=10) > 0: # cross-similar in G
centroid_cluster(N, C_, root) # form centroid around N, C_ +=[C]
centroid_cluster(N, C_, root) # form centroid cluster around N, C_ +=[C]
else:
break # the rest of N_ is lower-M
if len(C_) > ave_L:
@@ -244,7 +245,7 @@ def sort_H(H, fd): # re-assign olp and form priority indices for comp_tree, if
if not fd:
H.root.node_ = H.node_

def centroid_M_(m_, M, ave): # adjust weights on attr matches, also add cost attrs
def centroid_M_(m_, M, ave): # adjust weights on attr matches, add cost attrs?
_w_ = [1 for _ in m_]
while True:
w_ = [min(m/M, M/m) for m in m_] # rational deviations from mean,
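A runnable sketch of this re-weighting loop, with two assumptions made explicit: all matches are positive, and the loop (whose stop condition is collapsed above) terminates once the weights stop changing, with the weighted mean standing in for M between passes.

import numpy as np

def centroid_weights(m_, tol=1e-3):
    # iteratively re-weight attribute matches by rational deviation from
    # the weighted mean: min(m/M, M/m) is 1 at the mean, falls toward 0 away
    m_ = np.asarray(m_, dtype=float)  # assumes all m > 0
    w_ = np.ones_like(m_)
    while True:
        M = (m_ * w_).sum() / w_.sum()       # weighted mean match
        new_w_ = np.minimum(m_ / M, M / m_)  # rational deviations
        if np.abs(new_w_ - w_).sum() < tol:  # weights stable: done
            return new_w_, M
        w_ = new_w_

On m_ = [4, 5, 6, 25] the outlier 25 is progressively down-weighted relative to the near-mean values, which is the point of the adjustment.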
@@ -306,21 +307,25 @@ def agg_H_seq(focus,_nestt=(1,0)): # recursive level-forming pipeline, called f
hG = Q[-1] # init
bottom = 1
for lev_G in reversed(Q[:-1]): # top level gets no feedback
hm_ = hG.derTT[0] # + ave m-associated pars: len, dist, dcoords?
hm_ = hG.derTT[0] # + m-associated coefs: len, dist, dcoords?
hm_ = centroid_M_(hm_, sum(hm_)/8, ave)
dm_ = hm_ - lev_G.aves
if sum(dm_) > ave: # update
lev_G.aves = hm_ # proj agg+'m = m + dm?
# project focus by val_* dy,dx: frame derTT dgA / baseT gA?
# mean value shift within focus, bottom only, internal search per G
hG = lev_G
else:
bottom = 0; break # feedback did not reach the bottom level
dm_t += [dm_]
bottom_t += [bottom]
if any(bottom_t) and sum(dm_t[0]) +sum(dm_t[1]) > ave:
# bottom level is refocused, new aves, rerun agg+:
agg_H_seq(focus,(frame.nnest,frame.lnest))
if any(bottom_t) and sum(dm_t[0]) + sum(dm_t[1]) > ave:
# project focus by frame bottom-lev D_val:
if Val_(lev_G.Et, _Et=lev_G.Et, coef=20) > 0: # mean value shift within focus, bottom only, internal search per G
# include temporal Dm_+ Ddm_?
dy,dx = lev_G.baseT[-2:] # gA from summed Gs
y,x,Y,X = lev_G.box # current focus?
proj_focus = image[int(y+dy) : int(Y+dy), int(x+dx) : int(X+dx)]  # shifted focus window
# refocus bottom level with new aves, rerun agg+
agg_H_seq(proj_focus, (frame.nnest,frame.lnest))

return frame
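The window shift above will still index past the frame edge whenever the projected box crosses it; a bounds-safe sketch of the same step (shift_focus is an illustrative name, not the module's):

def shift_focus(image, box, dy, dx):
    # shift the focus box by the summed-gradient angle and re-read it,
    # clipped to image bounds; None if the projected box leaves the frame
    y, x, Y, X = box
    H, W = image.shape[:2]
    y2, Y2 = int(round(y + dy)), int(round(Y + dy))
    x2, X2 = int(round(x + dx)), int(round(X + dx))
    y2, x2 = max(y2, 0), max(x2, 0)
    Y2, X2 = min(Y2, H), min(X2, W)
    if y2 >= Y2 or x2 >= X2:
        return None
    return image[y2:Y2, x2:X2]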

12 changes: 9 additions & 3 deletions frame_2D_alg/comp_slice.py
@@ -26,8 +26,14 @@
Connectivity in P_ is traced through root_s of derts adjacent to P.dert_, possibly forking.
len prior root_ sorted by G is root.olp, to eval for inclusion in PP or start new P by ave*olp
'''
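A one-function sketch of that evaluation, names illustrative: olp scales the base filter, so a more-overlapping P needs proportionally more value to join a PP.

def eval_include(val, ave, olp):
    # overlap count raises the effective threshold to ave * olp
    return val > ave * olp

For example, with ave = 5 a P overlapped by 3 prior roots must exceed 15 to be included.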
ave, ave_d, ave_G, ave_PPm, ave_PPd, ave_L, ave_dI \
= aves.B, aves.d, aves.G, aves.PPm, aves.PPd, aves.L, aves.dI
ave = aves[-2]
ave_d = aves[1]
ave_G = aves[4]
ave_PPm = aves[22]
ave_PPd = aves[23]
ave_L = aves[6]
ave_dI = aves[14]
ave_md = [ave,ave_d]

class CdP(CBase): # produced by comp_P, comp_slice version of Clink
name = "dP"
@@ -125,7 +131,7 @@ def form_PP_(root, iP_, fd): # form PPs of dP.valt[fd] + connected Ps val
while _prim_:
prim_,lrim_ = set(),set()
for _P,_link in zip(_prim_,_lrim_):
if _link.Et[fd] < aves.md[fd] or _P.merged:
if _link.Et[fd] < ave_md[fd] or _P.merged:
continue
_P_.add(_P); link_.add(_link)
_I,_G,_M,_D,_L,_ = _P.latuple
77 changes: 77 additions & 0 deletions frame_2D_alg/deprecated/25.2.py
@@ -266,3 +266,80 @@ def agg_H_seq(focus): # sequential level-updating pipeline
else: break
frame.node_ = agg_H
return frame

class Caves(object): # hyper-parameters, init a guess, adjusted by feedback
name = "Filters"
def __init__(ave):
ave.m = 5
ave.d = 10 # ave change to Ave_min from the root intra_blob?
ave.L = 4
ave.rn = 1000 # max scope disparity
ave.max_dist = 2
ave.coef = 10
ave.ccoef = 10 # scaling match ave to clustering ave
ave.icoef = .15 # internal M proj_val / external M proj_val
ave.med_cost = 10
# comp_slice
ave.cs = 5 # ave of comp_slice
ave.dI = 20 # ave inverse m, change to Ave from the root intra_blob?
ave.inv = 20
ave.mG = 10
ave.mM = 2
ave.mD = 2
ave.mMa = .1
ave.mA = .2
ave.mL = 2
ave.PPm = 50
ave.PPd = 50
ave.Pm = 10
ave.Pd = 10
ave.Gm = 50
ave.Lslice = 5
# slice_edge
ave.I = 100
ave.G = 100
ave.g = 30 # change to Ave from the root intra_blob?
ave.mL = 2
ave.dist = 3
ave.dangle = .95 # vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2,
ave.olp = 5
ave.B = 30
ave.R = 10
ave.coefs = { "m": 1,
# vectorize_edge
"d": 1,
"L": 1,
"rn": 1,
"max_dist": 1,
"coef": 1,
"ccoef": 1,
"icoef": 1,
"med_cost": 1,
# comp_slice
"dI": 1,
"inv": 1,
"ave_cs_d": 1,
"mG": 1,
"mM": 1,
"mD": 1,
"mMa": 1,
"mA": 1,
"mL": 1,
"PPm": 1,
"PPd": 1,
"Pm": 1,
"Pd": 1,
"Gm": 1,
"Lslice": 1,
# slice_edge
"I": 1,
"G": 1,
"g": 1,
"dist": 1,
"dangle": 1,
"olp": 1,
"B": 1,
"R": 1
}
def sum_aves(ave):
return sum(value for value in vars(ave).values())
121 changes: 42 additions & 79 deletions frame_2D_alg/frame_blobs.py
@@ -55,82 +55,6 @@ def get_instance(cls, _id):
return inst
def __repr__(obj): return f"{obj.__class__.__name__}(id={obj.id})"

class Caves(object): # hyper-parameters, init a guess, adjusted by feedback
name = "Filters"
def __init__(ave):
ave.m = 5
ave.d = 10 # ave change to Ave_min from the root intra_blob?
ave.L = 4
ave.rn = 1000 # max scope disparity
ave.max_dist = 2
ave.coef = 10
ave.ccoef = 10 # scaling match ave to clustering ave
ave.icoef = .15 # internal M proj_val / external M proj_val
ave.med_cost = 10
# comp_slice
ave.cs = 5 # ave of comp_slice
ave.dI = 20 # ave inverse m, change to Ave from the root intra_blob?
ave.inv = 20
ave.mG = 10
ave.mM = 2
ave.mD = 2
ave.mMa = .1
ave.mA = .2
ave.mL = 2
ave.PPm = 50
ave.PPd = 50
ave.Pm = 10
ave.Pd = 10
ave.Gm = 50
ave.Lslice = 5
# slice_edge
ave.I = 100
ave.G = 100
ave.g = 30 # change to Ave from the root intra_blob?
ave.mL = 2
ave.dist = 3
ave.dangle = .95 # vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2,
ave.olp = 5
ave.B = 30
ave.R = 10
ave.coefs = { "m": 1,
# vectorize_edge
"d": 1,
"L": 1,
"rn": 1,
"max_dist": 1,
"coef": 1,
"ccoef": 1,
"icoef": 1,
"med_cost": 1,
# comp_slice
"dI": 1,
"inv": 1,
"ave_cs_d": 1,
"mG": 1,
"mM": 1,
"mD": 1,
"mMa": 1,
"mA": 1,
"mL": 1,
"PPm": 1,
"PPd": 1,
"Pm": 1,
"Pd": 1,
"Gm": 1,
"Lslice": 1,
# slice_edge
"I": 1,
"G": 1,
"g": 1,
"dist": 1,
"dangle": 1,
"olp": 1,
"B": 1,
"R": 1
}
def sum_aves(ave):
return sum(value for value in vars(ave).values())

def __getattribute__(ave,name):
coefs = object.__getattribute__(ave, "coefs")
@@ -141,9 +65,48 @@ def __getattribute__(ave,name):
else:
return object.__getattribute__(ave, name) * coefs[name] # always return ave * coef
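For contrast with the flat array that replaces it below, a self-contained sketch of this interception pattern (two filters only, illustrative):

class ScaledFilters:
    # every attribute read returns base value * its coefficient, so
    # feedback can tune coefs without touching the base filter values
    def __init__(self):
        self.m = 5; self.d = 10
        self.coefs = {"m": 1.0, "d": 1.0}
    def __getattribute__(self, name):
        coefs = object.__getattribute__(self, "coefs")
        val = object.__getattribute__(self, name)
        return val * coefs[name] if name in coefs else val

f = ScaledFilters()
f.coefs["d"] = 0.5  # feedback halves the difference filter
assert f.d == 5.0

The array version below drops the per-read multiplication and dict lookup, at the cost of positional indexing.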

aves = Caves()
ave = aves.B # base filter, directly used for comp_r fork
aveR = aves.R # for range+, fixed overhead per blob
# hyper-parameters, init a guess, adjusted by feedback
aves = np.array([
5, # ave.m
10, # ave.d = ave change to Ave_min from the root intra_blob?
2, # ave.n
100, # ave.I
100, # ave.G
5, # ave.Ga
1, # ave.L
5, # ave.LA
1000, # ave.rn = max scope disparity
2, # ave.max_dist
10, # ave.coef
10, # ave.ccoef = scaling match ave to clustering ave
.15, # ave.icoef = internal M proj_val / external M proj_val
10, # ave.med_cost
# comp_slice
20, # ave.dI = ave inverse m, change to Ave from the root intra_blob?
20, # ave.inv
10, # ave.mG
2, # ave.mM
2, # ave.mD
.1, # ave.mMa
.2, # ave.mA
2, # ave.mL
50, # ave.PPm
50, # ave.PPd
10, # ave.Pm
10, # ave.Pd
50, # ave.Gm
5, # ave.Lslice
# slice_edge
30, # ave.g = change to Ave from the root intra_blob?
2, # ave.mL
3, # ave.dist
.95, # ave.dangle = vertical difference between angles: -1->1, abs dangle: 0->1, ave_dangle = (min abs(dangle) + max abs(dangle))/2,
5, # ave.olp
30, # ave.B
10 # ave.R
])
ave = aves[-2] # base filter, directly used for comp_r fork
aveR = aves[-1] # for range+, fixed overhead per blob
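Call sites now read like aves[-2] or aves[12]; a hypothetical helper, not in the commit, can restore named access while keeping the flat array. The index-name pairs just restate the comments in the array above; the second mL slot is renamed mL2 here because enum names must be unique.

from enum import IntEnum

class Ave(IntEnum):  # hypothetical: name each slot of the aves array
    m = 0; d = 1; n = 2; I = 3; G = 4; Ga = 5; L = 6; LA = 7
    rn = 8; max_dist = 9; coef = 10; ccoef = 11; icoef = 12; med_cost = 13
    dI = 14; inv = 15; mG = 16; mM = 17; mD = 18; mMa = 19; mA = 20; mL = 21
    PPm = 22; PPd = 23; Pm = 24; Pd = 25; Gm = 26; Lslice = 27
    g = 28; mL2 = 29; dist = 30; dangle = 31; olp = 32; B = 33; R = 34

assert aves[Ave.B] == aves[-2] and aves[Ave.max_dist] == aves[9]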

class CFrame(CBase):

