diff --git a/custom_components/bermuda/bermuda_device.py b/custom_components/bermuda/bermuda_device.py index 8650955..4e2c126 100644 --- a/custom_components/bermuda/bermuda_device.py +++ b/custom_components/bermuda/bermuda_device.py @@ -12,6 +12,8 @@ from __future__ import annotations +from typing import TYPE_CHECKING, cast + import re from homeassistant.components.bluetooth import MONOTONIC_TIME, BluetoothScannerDevice @@ -35,6 +37,8 @@ DEFAULT_DEVTRACK_TIMEOUT, ) +if TYPE_CHECKING: + from .coordinator import BermudaDataUpdateCoordinator class BermudaDevice(dict): """ @@ -48,15 +52,15 @@ class BermudaDevice(dict): become entities in homeassistant, since there might be a _lot_ of them. """ - def __init__(self, address, options) -> None: + def __init__(self, address, coordinator: BermudaDataUpdateCoordinator) -> None: """Initial (empty) data.""" + self.coordinator = coordinator self.name: str | None = None self.local_name: str | None = None self.prefname: str | None = None # "preferred" name - ideally local_name self.address: str = address self.ref_power: float = 0 # If non-zero, use in place of global ref_power. self.ref_power_changed: float = 0 # Stamp for last change to ref_power, for cache zapping. - self.options = options self.unique_id: str | None = None # mac address formatted. self.address_type = BDADDR_TYPE_UNKNOWN self.area_id: str | None = None @@ -152,7 +156,7 @@ def set_ref_power(self, new_ref_power: float): # changed due to ref_power), we still call apply so that the new area_distance # gets applied. # if nearest_scanner is not None: - self.apply_scanner_selection(nearest_scanner) + # self.apply_scanner_selection(nearest_scanner) # Update the stamp so that the BermudaEntity can clear the cache and show the # new measurement(s) immediately. 
self.ref_power_changed = MONOTONIC_TIME() @@ -190,6 +194,33 @@ def apply_scanner_selection(self, closest_scanner: BermudaDeviceScanner | None): self.area_name, ) + def get_point(self): + from .common.point import BermudaPoint # ehhh + return BermudaPoint(self) + + def get_point_fresh(self): + return BermudaPoint.get_fresh() + + def apply_area(self, dist, area, maybe = None): + # _LOGGER.debug("apply_area: %s, %s, %s", dist, area, maybe) + self.maybe_area = maybe + # self.area_id = closest_scanner.area_id + self.area_distance = dist + if self.area_name != area: + self.area_prev = self.area_name + self.area_name = area + # _LOGGER.debug(": %s", got[0][1]) + + # old_area = self.area_name + # if (old_area != self.area_name) and self.create_sensor: + # # Our area has changed! + # _LOGGER.debug( + # "Device %s was in '%s', now '%s'", + # self.name, + # old_area, + # self.area_name, + # ) + def calculate_data(self): """ Call after doing update_scanner() calls so that distances @@ -211,13 +242,13 @@ def calculate_data(self): # Update whether this device has been seen recently, for device_tracker: if ( self.last_seen is not None - and MONOTONIC_TIME() - self.options.get(CONF_DEVTRACK_TIMEOUT, DEFAULT_DEVTRACK_TIMEOUT) < self.last_seen + and MONOTONIC_TIME() - self.coordinator.options.get(CONF_DEVTRACK_TIMEOUT, DEFAULT_DEVTRACK_TIMEOUT) < self.last_seen ): self.zone = STATE_HOME else: self.zone = STATE_NOT_HOME - if self.address.upper() in self.options.get(CONF_DEVICES, []): + if self.address.upper() in self.coordinator.options.get(CONF_DEVICES, []): # We are a device we track. 
Flag for set-up: self.create_sensor = True @@ -241,7 +272,7 @@ def update_scanner(self, scanner_device: BermudaDevice, discoveryinfo: Bluetooth self.scanners[format_mac(scanner_device.address)] = BermudaDeviceScanner( self, discoveryinfo, # the entire BluetoothScannerDevice struct - self.options, + self.coordinator, scanner_device, ) device_scanner = self.scanners[format_mac(scanner_device.address)] diff --git a/custom_components/bermuda/bermuda_device_scanner.py b/custom_components/bermuda/bermuda_device_scanner.py index 49ef474..1e47162 100644 --- a/custom_components/bermuda/bermuda_device_scanner.py +++ b/custom_components/bermuda/bermuda_device_scanner.py @@ -58,9 +58,10 @@ def __init__( self, parent_device: BermudaDevice, # The device being tracked scandata: BluetoothScannerDevice, # The advertisement info from the device, received by the scanner - options, + coordinator, scanner_device: BermudaDevice, # The scanner device that "saw" it. ) -> None: + self.coordinator = coordinator # I am declaring these just to control their order in the dump, # which is a bit silly, I suspect. self.name: str = scanner_device.name or scandata.scanner.name @@ -72,7 +73,7 @@ def __init__( self.area_name: str | None = scanner_device.area_name self.parent_device = parent_device self.parent_device_address = parent_device.address - self.options = options + self.options = coordinator.options self.stamp: float | None = 0 # Only remote scanners log timestamps, local usb adaptors do not. 
self.scanner_sends_stamps = isinstance(scanner_device, BaseHaRemoteScanner) diff --git a/custom_components/bermuda/binary_sensor.py b/custom_components/bermuda/binary_sensor.py index e60d0e3..f2055a1 100644 --- a/custom_components/bermuda/binary_sensor.py +++ b/custom_components/bermuda/binary_sensor.py @@ -17,7 +17,7 @@ async def async_setup_entry(hass, entry, async_add_devices): class BermudaBinarySensor(BermudaEntity, BinarySensorEntity): - """bermuda binary_sensor class.""" + """bermuda binary_sensor class.""" @property def name(self): diff --git a/custom_components/bermuda/common/__init__.py b/custom_components/bermuda/common/__init__.py new file mode 100644 index 0000000..6f53023 --- /dev/null +++ b/custom_components/bermuda/common/__init__.py @@ -0,0 +1,3 @@ +"""Dummy init so that pytest works.""" + +from __future__ import annotations diff --git a/custom_components/bermuda/common/map.py b/custom_components/bermuda/common/map.py new file mode 100644 index 0000000..046f909 --- /dev/null +++ b/custom_components/bermuda/common/map.py @@ -0,0 +1,953 @@ + +import json +import bisect +import math +import numpy as np +import statistics +import time + +from .point import BermudaPoint + +TOOLS = False +try: + from ..bermuda_device import BermudaDevice + from ..const import ( + _LOGGER, + ) +except ImportError: + TOOLS = True + +NN = True +try: + import torch +except ImportError: + NN = False + +if NN: + dtype = torch.float + device = "cpu" + # device = "cuda" if torch.cuda.is_available() else "cpu" + torch.set_default_device(device) + + NN_COORD_NONE = 100 + NN_AREA_BAD = 0 + NN_AREA_GOOD = 1 + +#sooo annoying +def DBG(fmt : str, *args, **kwargs): + s = fmt.format(*args, **kwargs) + if TOOLS: + print(s) + else: + _LOGGER.debug("%s", s) + +class Tunables(): + def __init__(self) -> None: + # pass + # self.value = {} + self.names = list() + self.rmin = {} + self.rmax = {} + + def setup(self, name: str, val: float, rmin: float, rmax: float): + # self.value[name] = val + 
setattr(self, name, val) + self.names.append(name) + self.rmin[name] = rmin + self.rmax[name] = rmax + + def print(self): + print('\n\nTUNABLES:') + for n in self.names: + print('self.tune.' + n + ' =', getattr(self, n)) + print('\nTUNABLES END\n') + +class BermudaMap(): + def __init__(self, data: dict) -> None: + + # self.metric = 'dist' + self.metric = 'dist_raw' + + self.areas = set() + self.scanners = set() + self.coords = ('',) + self.skip_coord = None + + self.stat_types = ['pure', 'dirty'] + self.stat_values = ['good', 'bad', 'miss'] + + self.tune = Tunables() + self.tune.setup('neighboor_count', 14, 1.1, 20.0) + self.tune.setup('coord_missmatch_dist', 3.56, 0.0, 20.0) + self.tune.setup('order_value', 4.992, 0.0, 10.0) + self.tune.setup('order_power', 1.337, 0.1, 20.0) + self.tune.setup('count_value', 1.783, 0.1, 10.0) + self.tune.setup('distance_value', 1.0, 0.0, 10.0) + self.tune.setup('distance_range', 4.98, 0.001, 10.0) + self.tune.setup('value_pass_factor', 2.688, 0.9, 8.0) + + #dist + self.tune.neighboor_count = 20 + self.tune.coord_missmatch_dist = 3.82 + self.tune.coord_value = 10.0 + self.tune.order_value = 5.585 + self.tune.order_power = 3.746 + self.tune.count_value = 1.388 + self.tune.distance_value = 4.0 #dist with exp + self.tune.distance_range = 6.175 + self.tune.value_pass_factor = 3 + + #dist_raw + self.tune.neighboor_count = 20.0 + self.tune.coord_missmatch_dist = 9.451926088318196 + self.tune.order_value = 5.585 + self.tune.order_power = 3.746 + self.tune.count_value = 1.388 + self.tune.distance_value = 4.0 + self.tune.distance_range = 1.611654243658002 + self.tune.value_pass_factor = 3.0457876605073597 + + self.tune.neighboor_count = 20.0 + self.tune.coord_missmatch_dist = 9.451926088318196 + self.tune.order_value = 1.1474162757421533 + self.tune.order_power = 3.746 + self.tune.count_value = 1.388 + self.tune.distance_value = 4.0 + self.tune.distance_range = 2.4884801217148764 + self.tune.value_pass_factor = 4.89359729039503 + + 
self.tune.neighboor_count = 20.0 + self.tune.coord_missmatch_dist = 9.451926088318196 + self.tune.order_value = 1.1474162757421533 + self.tune.order_power = 3.746 + self.tune.count_value = 1.388 + self.tune.distance_value = 4.0 + self.tune.distance_range = 3.1549473112242126 + self.tune.value_pass_factor = 4.89359729039503 + + self.tune.neighboor_count = 20.0 + self.tune.coord_missmatch_dist = 9.451926088318196 + self.tune.order_value = 1.1474162757421533 + self.tune.order_power = 3.746 + self.tune.count_value = 1.388 + self.tune.distance_value = 4.0 + self.tune.distance_range = 1.9895264641403068 + self.tune.value_pass_factor = 2.2341684935797055 + + # self.tune.coord_missmatch_dist = 20.0 #hmm + # self.tune.distance_range = 0.535 + # self.tune.value_pass_factor = 1.944 + + # self.tune.coord_missmatch_dist = 20.0 #for map vis + # self.tune.distance_range = 0.535 + # self.tune.value_pass_factor = 1.05 + + self.load(data) + + def load(self, ret: dict): + self._area_points = {} + for area, points in ret.items(): + self.areas.add(area) + self._area_points[area] = [] + for p in points: + bp = BermudaPoint(p) + self.scanners.update(bp.get_scanners()) + self._area_points[area].append(bp) + self.coords = tuple(self.scanners) + self.nn_areas = tuple(self.areas) + # self.bake() + # self.calc_stats() + + def to_dict(self) -> dict: + ret = {} + for area, points in self._area_points.items(): + ret[area] = [] + for bp in points: + ret[area].append(bp.to_dict()) + return ret + + def clear_all(self, area: str): + self._area_points = {} + + def clear_area(self, area: str): + self._area_points[area] = [] + + + if not TOOLS: + def map_point(self, device: BermudaDevice, point: BermudaPoint): + point = point.fresh_cut() + coord = self.get_coord(point) + _LOGGER.debug(" CO: %s", coord) + dbg = False + # dbg = True + ret = self._map_point(coord, device.area_name, dbg = dbg) + _LOGGER.debug(" RET: %s", ret) + return ret + def value_point(self, device: BermudaDevice, point: BermudaPoint): 
+ point = point.fresh_cut() + coord = self.get_coord(point) + return self._point_probs(coord) + + + def _map_point(self, coord: tuple, prev_area: str | None = None, dbg = False): + + # return self._probs_detect(self._point_probs(coord), dbg = dbg) + + # bp = self.get_bp(coord) + # if len(bp.area) > 0: + # _LOGGER.debug("REF: %s", point.to_dict()) + # _LOGGER.debug(" CO: %s", coord) + # # Counter(got) + # times = bp.area + # _LOGGER.debug(" times: %s", times) + # times_sort = sorted(times.items(), key=lambda item: item[1], reverse=False) + # _LOGGER.debug(" times_sort: %s", times_sort) + + if dbg: + DBG("CO: {}", coord) + + got = [] + for bp in self.baked.values(): + if self.skip_coord is not None and bp.coord == self.skip_coord: + continue + d = self._dist(coord, bp.coord) + if d is None: + continue + # _LOGGER.debug(" cmp %f for point %s", d, p.to_dict()) + if not got or d < got[-1][0]: + # bisect.insort(got, ("key": 2), key=lambda x: x["key"]) + if len(got) >= int(self.tune.neighboor_count): + got.pop(-1) + bisect.insort(got, (d, bp)) + + value = {} + for i in range(len(got)): + g = got[i] + if dbg: + DBG(" GOT: {} : {}", g[0], g[1].area) + pts = sum(g[1].area.values()) + + # goes from tune_order_value to 1.0 according to power (at 1.0 linearly), unless I screwed up;p + if int(self.tune.neighboor_count) > 1: + oi = pow(1.0 - (i / (int(self.tune.neighboor_count) - 1)), self.tune.order_power) + ov = 1 + oi * (self.tune.order_value - 1) + if dbg: + DBG(" oi {} ov: {}", oi, ov) + del oi #python's crap + else: + ov = self.tune.order_value + + for area in g[1].area.keys(): + if area not in value: + value[area] = 0 + + c = g[1].area[area] + + # dv = 1 / (1 + g[0]) + # dv *= self.tune.distance_value + dv = self.tune.distance_value * math.exp(-g[0]*g[0] / (self.tune.distance_range*self.tune.distance_range)) + value[area] += ov + (dv + (c / pts) * self.tune.count_value) + + if dbg: + DBG(" value: {}", value) + + value_sort = sorted(value.items(), key=lambda item: item[1], 
reverse=True) + + if dbg: + DBG("value_sort: {}", value_sort) + + if len(value_sort) == 1: + return { 'dist': value_sort[0][1], 'area': value_sort[0][0] } + if len(value) >= 2: + if value_sort[0][1] / (value_sort[1][1] + 0.00001) >= self.tune.value_pass_factor: + return { 'dist': value_sort[0][1], 'area': value_sort[0][0], 'maybe': value_sort[1][0] } + return None + + def _point_probs(self, coord: tuple, dbg = False): + ret = {} + for area in self.areas: + ret[area] = 0 + for bp in self.baked.values(): + if self.skip_coord is not None and bp.coord == self.skip_coord: + continue + d = self._dist(coord, bp.coord) + if d is None: + continue + dv = self._dist2value(d) + # _LOGGER.debug(" cmp %f for point %s", d, p.to_dict()) + for area in bp.area.keys(): + tmp = dv * bp.area[area] + ret[area] += tmp + + for area in ret.keys(): + pass + # ret[area] *= self.area_mean[area] + # ret[area] /= self.area_mean[area] + # ret[area] *= self.area_mean_mean / self.area_mean[area] + # ret[area] /= self.area_mean_mean / self.area_mean[area] + # ret[area] /= self.area_mean[area]**0.1 + + # ret = sorted(ret.items(), key=lambda item: item[1], reverse=True) + return ret + + def _probs_detect(self, probs: dict, dbg = False): + ret = {} + + psort = sorted(probs.items(), key=lambda item: item[1], reverse=True) + + if len(psort) == 1: + return { 'dist': psort[0][1], 'area': psort[0][0] } + if len(psort) >= 2: + if psort[0][1] / (psort[1][1] + 0.00001) >= self.tune.value_pass_factor: + return { 'dist': psort[0][1], 'area': psort[0][0], 'maybe': psort[1][0] } + return None + + + def bulid_coord(self, p: dict) -> tuple: + coords = []#there probably is something more efficient? + for scanner in self.coords: + if scanner in p.data: + coords.append(p.data[scanner][self.metric]) + else: + coords.append(None) + return tuple(coords) + + def get_coord(self, p: BermudaPoint) -> tuple: + p = p.fresh_cut() + coords = []#there probably is something more efficient? 
+ for scanner in self.coords: + if scanner in p.data: + coords.append(p.data[scanner][self.metric]) + else: + coords.append(None) + return tuple(coords) + + def _dist(self, c0, c1) -> float: + dist = 0.0 + count = 0 + #TODO value short distances more since they're more reliable? + #TODO instead of coord missmatch distance, it'd be better to just value coord matches? + + for i in range(len(self.coords)): + if c0[i] is not None and c1[i] is not None: + d = c0[i] - c1[i] + dist += d * d + count += 1 + elif c0[i] is not None or c1[i] is not None: + d = self.tune.coord_missmatch_dist + dist += d * d + if count: + return math.sqrt(dist) + return None + + def _dist2value(self, d: float) -> float: + # dv = 1 / (1 + d / self.tune.distance_range) + # dv = 1 / (1 + d**2 / self.tune.distance_range) + dv = math.exp(-d / self.tune.distance_range) + # dv /= 0.2 * self.tune.distance_range**2 #normalize a bit so the graph image behaves better + # dv = math.exp(-d**2 / (self.tune.distance_range**2)) + dv *= self.tune.distance_value + return dv + + def get_bp(self, coord): + if coord in self.baked: + return self.baked[coord] + bp = BermudaBakedPoint(coord) + self.baked[coord] = bp + return bp + + # def update(existing_aggregate, new_value): + # (count, mean, M2) = existing_aggregate + # count += 1 + # delta = new_value - mean + # mean += delta / count + # delta2 = new_value - mean + # M2 += delta * delta2 + # return (count, mean, M2) + + # def finalize(existing_aggregate): + # (count, mean, M2) = existing_aggregate + # if count < 2: + # return float("nan") + # else: + # (mean, variance, sample_variance) = (mean, M2 / count, M2 / (count - 1)) + # return (mean, variance, sample_variance) + + def bake(self): + self.baked = {} + # self.baked = [] + for area, points in self._area_points.items(): + for p in points: + p = p.fresh_cut() + coord = self.get_coord(p) + + bp = self.get_bp(coord) + + if area not in bp.area: + bp.area[area] = 0 + bp.area[area] += 1 + + #in the end we need to bake 
into somekindof actual map/grid (something like what the graph update_mesh() looks like) + #but given the expected large dimensionality and sparcity it's probably best to have a few maps for whatever combinations of coordinates happen in practice? + # on the plus side rssi/dist_raw is horribly discrete so it's very little data no matter what;p + + self.bake_mean() + + def bake_mean(self): + self.area_count = {} + self.area_mean = {} + fu = list(self.baked.values()) + n = len(fu) + for area in self.areas: + mean = 0 + count = 0 + for i in range(n): + bp = fu[i] + if area not in bp.area: + continue + + for j in range(i+1, n): + # for j in range(n): + # if i == j: + # continue + bp1 = fu[j] + if area not in bp1.area: + continue + d = self._dist(bp.coord, bp1.coord) + if d is not None: + dv = self._dist2value(d) + # mean += dv * 1 + # count += 1 + c = bp.area[area] + bp1.area[area] + mean += dv * c + count += c + + self.area_count[area] = count + self.area_mean[area] = mean / count if count else 0 + DBG('MEAN {} COUNT {} for {}', self.area_mean[area], count, area) + + if self.area_count: + self.area_count_mean = statistics.mean(self.area_count.values()) + self.area_mean_mean = statistics.mean(self.area_mean.values()) + else: + self.area_count_mean = 0 + self.area_mean_mean = 0 + DBG('MEAN MEAN {} COUNT {}', self.area_mean_mean, self.area_count_mean) + # for area in self.areas: + # self.area_mean[area] = self.area_mean_mean / self.area_mean[area] + # DBG('MEAN {} for {}', self.area_mean[area], area) + + def debug_point(self, bp): + DBG('FUUUU START:') + # ret = self._map_point(bp.coord, dbg = True) + DBG(" CO: {}", bp.coord) + ret = self._point_probs(bp.coord, dbg = True) + DBG(" RET: {} for: {}", ret, bp.sorted_areas()) + DBG('FUUUU END') + + def calc_stats(self): + self.stats = {} + for area in self.areas: + self.stats[area] = {} + self.stats[area]['count pure distinct'] = 0 + self.stats[area]['count pure total'] = 0 + self.stats[area]['count dirty distinct'] = 0 + 
self.stats[area]['count dirty total'] = 0 + + for typ in self.stat_types: + for sv in self.stat_values: + self.stats[area][typ+' '+sv] = 0 + + for bp in self.baked.values(): + self.skip_coord = bp.coord + + sa = bp.sorted_areas() + if len(sa) == 1: + typ = 'pure' + self.stats[area]['count pure distinct'] += 1 + self.stats[area]['count pure total'] += sa[0][1] + else: + typ = 'dirty' + for area, count in sa: + self.stats[area]['count dirty distinct'] += 1 + self.stats[area]['count dirty total'] += count + + area = sa[0][0] + + ret = self._map_point(bp.coord) + + if ret is None: + self.stats[area][typ + ' miss'] += sa[0][1] + else: + if area == ret['area']: + self.stats[area][typ + ' good'] += sa[0][1] + else: + self.stats[area][typ + ' bad'] += sa[0][1] + # if area == 'corridor': + # self.debug_point(bp) + # for area in bp.sorted_areas(): + # if area == ret['area']: + # self.stats[area] + # print(self.stats) + DBG(self.stats_str()) + self.skip_coord = None + return + + def calc_stats2(self): + self.stats = {} + for area in self.areas: + self.stats[area] = {} + self.stats[area]['count pure distinct'] = 0 + self.stats[area]['count pure total'] = 0 + self.stats[area]['count dirty distinct'] = 0 + self.stats[area]['count dirty total'] = 0 + self.stats[area]['mse'] = 0 + self.stats[area]['dirty mse'] = 0 + + for typ in self.stat_types: + for sv in self.stat_values: + self.stats[area][typ+' '+sv] = 0 + + for bp in self.baked.values(): + full = self._point_probs(bp.coord) + self.skip_coord = bp.coord + skip = self._point_probs(bp.coord) #TODO super stupidly redundant + + ret = self._probs_detect(skip) + + sa = bp.sorted_areas() + area = sa[0][0] + + # if area == 'kitchen': + # self.debug_point(bp) + if len(sa) == 1: + typ = 'pure' + self.stats[area]['count pure distinct'] += 1 + self.stats[area]['count pure total'] += sa[0][1] + if ret is None: + self.stats[area][typ + ' miss'] += sa[0][1] + else: + if area == ret['area']: + self.stats[area][typ + ' good'] += sa[0][1] + 
else: + self.stats[area][typ + ' bad'] += sa[0][1] + else: + # print(skip) + typ = 'dirty' + # totre = sum(c for _, c in skip) + totre = sum(skip.values()) + totbp = sum(c for _, c in sa) + se = 0 + re = dict(skip) + for area, count in sa: + self.stats[area]['count dirty distinct'] += 1 + self.stats[area]['count dirty total'] += count + if ret is None: + self.stats[area][typ + ' miss'] += count + else: + if area == ret['area']: + self.stats[area][typ + ' good'] += count + else: + self.stats[area][typ + ' bad'] += count + self.stats[area]['dirty mse'] += math.sqrt(se) + + # totfull = sum(c for _, c in full) + # totskip = sum(c for _, c in skip) + # se = 0 + # re = dict(skip) + # for area, count in full: + # e = count / totfull - re[area] / totskip + # se += e*e + # self.stats[area]['mse'] += math.sqrt(se) + + # if area == 'basement': + # self.debug_point(bp) + # DBG('full {}', full) + # DBG('skip {}', skip) + # print(self.stats) + DBG(self.stats_str()) + self.skip_coord = None + return + + + def stats_str(self) -> str: + ret = '' + # ret = 'tune_neighboor_count: {}\n'.format(self.tune.neighboor_count) + gs = {} + for v in self.stat_values: + for typ in self.stat_types: + gs[typ+' '+v] = 0 + + gs['mse'] = 0 + for area, st in self.stats.items(): + ret += area.ljust(32) + # ret += '\tpure {:>4} / {:<4}'.format(st['count pure distinct'], st['count pure total']) + # ret += '\tdirty {:>4} / {:<4}'.format(st['count dirty distinct'], st['count dirty total']) + # ret += '\t' + str(round(self.stats[area]['mse'], 4)).rjust(5) + ' mse' + for typ in self.stat_types: + ret += '\t{:>5} {:>4} / {:<4}'.format(typ, st['count '+typ+' distinct'], st['count '+typ+' total']) + ret += '\n' + # tot = sum(st.values()) + # gs['mse'] += st['mse'] + for v in self.stat_values: + ret += v.rjust(12) + for typ in self.stat_types: + tot = self.stats[area]['count '+typ+' total'] + sv = typ+' '+v + + if tot: + fl = st[sv] / tot + else: + fl = 0 + ret += ' ' + str(round(100 * fl, 1)).rjust(4) + ' %' + 
gs[sv] += fl + # if 'dirty mse' in self.stats[area]: + # ret += ' ' + str(round(self.stats[area]['dirty mse'], 4)).rjust(5) + ' mse' + ret += '\n' + + tot = len(self.stats) + ret += 'global avg:' + # ret += '\t\t\t\t' + str(round(self.stats[area]['mse'], 4)).rjust(5) + ' mse' + ret += '\n' + for v in self.stat_values: + ret += v.rjust(10) + for typ in self.stat_types: + sv = typ+' '+v + ret += ' ' + str(round(100 * gs[sv] / tot, 2)).rjust(5) + ' %' + # if 'dirty mse' in gs: + # ret += ' ' + str(round(gs['dirty mse'], 4)).rjust(5) + ' mse' + ret += '\n' + return ret + + + if NN: + def _coord2tens(self, coord): + return torch.Tensor([NN_COORD_NONE if c is None else c for c in coord]) + + def bake_nn(self): + # x = torch.tensor([[0.1, 1.2], [2.2, 3.1], [4.9, 5.2]]) + # x = torch.tensor([]) + # y = torch.tensor([]) + # x = torch.empty((len(self.baked), len(self.coords))) + x = torch.full((len(self.baked), len(self.coords)), NN_COORD_NONE, dtype=dtype) + y = torch.full((len(self.baked), len(self.nn_areas)), NN_AREA_BAD, dtype=dtype) + + i = 0 + for bp in self.baked.values(): + sa = bp.sorted_areas() + + x[i] = self._coord2tens(bp.coord) + tmp = [NN_AREA_BAD] * len(self.nn_areas) + + if len(sa) == 1: + area = sa[0][0] + + tmp[self.nn_areas.index(area)] = NN_AREA_GOOD + # y.append(tmp) + else: + tot = sum(c for _, c in sa) + for area, count in sa: + tmp[self.nn_areas.index(area)] = NN_AREA_BAD + (NN_AREA_GOOD - NN_AREA_BAD) / tot + y[i] = torch.Tensor(tmp) + i += 1 + + print('X=', x) + print('Y=', y) + self.model = torch.nn.Sequential( + # torch.nn.Linear(len(self.coords), len(self.areas)), + + torch.nn.Linear(len(self.coords), len(self.coords) * 2), + # torch.nn.Tanh(), + # torch.nn.ReLU(), + # torch.nn.CELU(), + # torch.nn.Sigmoid(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, 
len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.ReLU(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.ReLU(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.ReLU(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Sigmoid(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Sigmoid(), + torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Tanh(), + # torch.nn.Linear(len(self.coords) * 100, len(self.coords) * 100), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), torch.nn.Sigmoid(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.ReLU(), + # # torch.nn.Sigmoid(), + # torch.nn.Linear(len(self.coords) * 2, len(self.coords) * 2), + # torch.nn.Sigmoid(), + torch.nn.Linear(len(self.coords)*2, len(self.areas)), + + # torch.nn.Linear(len(self.coords), len(self.coords) * 100), + # torch.nn.Linear(len(self.coords) * 100, len(self.coords) * 100), torch.nn.Tanh(), + # torch.nn.Linear(len(self.coords)*100, len(self.areas)), + + torch.nn.Sigmoid(), + # torch.nn.ReLU(), + ) + + loss_fn = torch.nn.MSELoss(reduction='sum') + optimizer = torch.optim.RMSprop(self.model.parameters(), lr=1e-3) + + t_end = time.time() + 
15 + prev_loss = 0 + meh_count = 0 + i = 0 + while time.time() < t_end: + y_pred = self.model(x) + + loss = loss_fn(y_pred, y) + if i % 100 == 0: + print("AAAAAA", i, loss.item()) + + if abs(loss.item() - prev_loss) / loss.item() >= 1.0 / 1000.0: + meh_count = 0 + else: + meh_count += 1 + if i >= 1000 and meh_count > 10: + print("loss.item()", loss.item()) + print("prev_loss()", prev_loss) + print("abs(loss.item() - prev_loss)", abs(loss.item() - prev_loss)) + print("no progress", i, loss.item()) + break + + + prev_loss = loss.item() + # Before the backward pass, use the optimizer object to zero all of the + # gradients for the variables it will update (which are the learnable + # weights of the model). This is because by default, gradients are + # accumulated in buffers( i.e, not overwritten) whenever .backward() + # is called. Checkout docs of torch.autograd.backward for more details. + optimizer.zero_grad() + + # Backward pass: compute gradient of the loss with respect to model + # parameters + loss.backward() + + # Calling the step function on an Optimizer makes an update to its + # parameters + optimizer.step() + i += 1 + print(loss.item()) + + def _map_point_nn(self, coord): + x = self._coord2tens(coord) + y = self.model(x) + + if NN_AREA_GOOD > NN_AREA_BAD: + ind = torch.argmax(y).item() + else: + ind = torch.argmin(y).item() + # print('ind=', ind) + # print('y[ind]=', y[ind].item()) + # for i, v in enumerate(y): + + ret = { + # 'dist': torch.index_select(y, 0, ind).item(), + 'dist': y[ind].item(), + 'area': self.nn_areas[ind] + } + + for i, o in enumerate(y): + # print(abs(y[ind].item()) / (abs(o.item()) + 0.00001)) + if i != ind and abs(y[ind].item()) / (abs(o.item()) + 0.00001) < self.tune.value_pass_factor: + ret = None + + return ret + + def calc_stats_nn(self): + self.stats = {} + for area in self.areas: + self.stats[area] = {} + self.stats[area]['count pure distinct'] = 0 + self.stats[area]['count pure total'] = 0 + self.stats[area]['count dirty 
distinct'] = 0 + self.stats[area]['count dirty total'] = 0 + + for typ in self.stat_types: + for sv in self.stat_values: + self.stats[area][typ+' '+sv] = 0 + + for bp in self.baked.values(): + sa = bp.sorted_areas() + if len(sa) == 1: + typ = 'pure' + self.stats[area]['count pure distinct'] += 1 + self.stats[area]['count pure total'] += sa[0][1] + else: + typ = 'dirty' + for area, count in sa: + self.stats[area]['count dirty distinct'] += 1 + self.stats[area]['count dirty total'] += count + + area = sa[0][0] + + ret = self._map_point_nn(bp.coord) + + if ret is None: + self.stats[area][typ + ' miss'] += sa[0][1] + else: + if area == ret['area']: + self.stats[area][typ + ' good'] += sa[0][1] + else: + self.stats[area][typ + ' bad'] += sa[0][1] + # if area == 'corridor': + # self.debug_point(bp) + # for area in bp.sorted_areas(): + # if area == ret['area']: + # self.stats[area] + # print(self.stats) + DBG(self.stats_str()) + self.skip_coord = None + return + + +class BermudaBakedPoint(): + def __init__(self, coord) -> None: + self.coord = coord + self.area = {} + + def __lt__(self, other): + for i in range(len(self.coord)): + if self.coord[i] is not None: + if other.coord[i] is None: + return True + elif self.coord[i] < other.coord[i]: + return True + elif other.coord[i] is not None: + return False + return False + + def sorted_areas(self): + return sorted(self.area.items(), key=lambda item: item[1], reverse=True) + + + +class BermudaMapTrack(): + def __init__(self, bmap: BermudaMap) -> None: + self.bmap = bmap + self.time_raw = 0.0 + self.time = 0.0 + + self.value_raw = {} + self.value_raw_hist = {} + self.value_min = {} + self.value_max = {} + self.value = {} + self.alpha = 0.8 + self.value_s0 = {} + self.alpha_s0 = 0.5 + + + self.area = None + + if not TOOLS and True: + def map_point(self, device: BermudaDevice, point: BermudaPoint): + point = point.fresh_cut() + + self.time_raw = time.time() + self.value_raw = self.bmap.value_point(device, point) + + # ret = 
device.maptrack.value_point(device, point) + # for area, value in ret.items(): + # device.value_raw[area] = self.value_raw + + # self.value_raw_hist.pop(0) + # self.value_raw_hist.append(self.value_raw) + + sv = 0.00001 + + DBG('self.time_raw = {}', self.time_raw) + + #TODO make it more 'pythonic'... ehh... + + for a, vr in self.value_raw.items(): + if a not in self.value_s0: + self.value_s0[a] = 0.0 + self.value_s0[a] *= 1.0 - self.alpha_s0 + self.value_s0[a] += vr * self.alpha_s0 + + if a not in self.value_raw_hist: + self.value_raw_hist[a] = [] + if len(self.value_raw_hist[a]) >= 3: + self.value_raw_hist[a].pop(0) + self.value_raw_hist[a].append(vr) + + self.value_min[a] = min(self.value_raw_hist[a]) + self.value_max[a] = max(self.value_raw_hist[a]) + + if a == self.area: + self.value[a] = self.value_max[a] + else: + self.value[a] = self.value_min[a] + + # for a, vr in self.value_raw.items():#make it more 'pythonic'... + # if a not in self.value: + # self.value[a] = 0.0 + # vrpp = self.value_rawpp.get(a, 0.0) + # vrp = self.value_rawp.get(a, 0.0) + + # self.value_s0[a] *= 1.0 - self.alpha_s0 + # self.value_s0[a] += vr * self.alpha_s0 + + # v = self.value.get(a, 0.0) + + # if abs(self.value_s0[a] - v) / (v+sv) >= 0.5: + # a = self.alpha + # self.value[a] *= 1.0 - self.alpha + # self.value[a] += vr * self.alpha + # continue + + # self.value[a] *= 1.0 - self.alpha + # self.value[a] += v * self.alpha + + + # coord = self.bmap.get_coord(point) + + psort = sorted(self.value.items(), key=lambda item: item[1], reverse=True) + + if len(psort) == 1: + self.area = psort[0][0] + device.apply_area(psort[0][1], self.area) + if len(psort) >= 2: + if (psort[0][1] + sv) / (psort[1][1] + sv) >= self.bmap.tune.value_pass_factor: + self.area = psort[0][0] + device.apply_area(psort[0][1], self.area, psort[1][0]) + # return { 'dist': psort[0][1], 'area': psort[0][0], 'maybe': psort[1][0] } + # return + # # _LOGGER.debug(" CO: %s", coord) + # dbg = False + # dbg = True + # ret = 
self.bmap._map_point(coord, device.area_name, dbg = dbg) + # _LOGGER.debug(" RET: %s", ret) + # return ret + + if not TOOLS and False: + def map_point(self, device: BermudaDevice, point: BermudaPoint): + # _LOGGER.debug("REF full: %s", point.to_dict()) + point = point.fresh_cut() + # _LOGGER.debug("REF fresh: %s", point.to_dict()) + + self.value_raw = self.bmap.value_point(device, point) + + # ret = self._probs_detect(self.value_raw) + ret = self.bmap.map_point(device, point) + + # _LOGGER.debug("GOT: %s", ret) + if isinstance(ret, dict) and 'dist' in ret and 'area' in ret: + device.apply_area(**ret) + + + + diff --git a/custom_components/bermuda/common/point.py b/custom_components/bermuda/common/point.py new file mode 100644 index 0000000..3177890 --- /dev/null +++ b/custom_components/bermuda/common/point.py @@ -0,0 +1,127 @@ + +import bisect +import math +import json + +TOOLS = False +try: + from ..bermuda_device import BermudaDevice + from ..const import ( + _LOGGER, + ) + from ..util import rssi_to_metres +except ImportError: + TOOLS = True + +class BermudaPoint(): + def __init__(self, + smth#: BermudaDevice, # The device/beacon for which we capture this point + ) -> None: + if not TOOLS: + if isinstance(smth, BermudaDevice): + self._init(smth) + return + + if isinstance(smth, dict): + self.from_dict(smth) + else: + self.data = {} + + if not TOOLS: + def _init(self, beacon: BermudaDevice) -> None: + _LOGGER.debug("device: %s", beacon.name) + self.beacon_address = beacon.address + self.data = {} + for scanner in beacon.scanners.values(): + _LOGGER.debug(" scanner.scanner_device.name: %s", scanner.scanner_device.name) + # self.data[scanner.scanner_device.name] = scanner.rssi_distance + self.data[scanner.scanner_device.name] = { + 'stamp' : scanner.stamp, + 'dist' : scanner.rssi_distance, + 'dist_raw' : scanner.rssi_distance_raw, + 'rssi' : scanner.rssi, + } + + @classmethod + def get_fresh(self, beacon: BermudaDevice):# -> BermudaPoint: #seriously wtf? 
+ ret = BermudaPoint() + ret.beacon_address = beacon.address + + stamp = 0.0 + for scanner in beacon.scanners.values(): + stamp = max(stamp, scanner.stamp) + + ret.data = {} + for scanner in beacon.scanners.values(): + if stamp - scanner.stamp <= 0.5: + ret.data[scanner.scanner_device.name] = { + 'stamp' : scanner.stamp, + 'dist' : scanner.rssi_distance, + 'dist_raw' : scanner.rssi_distance_raw, + 'rssi' : scanner.rssi, + } + return ret + + def fresh_cut(self, time_cut: float = 0.2): + ms = 0.0 + for name in self.data.keys(): + ms = max(ms, self.data[name]['stamp']) + + ret = BermudaPoint(self.to_dict()) + for name in self.data.keys(): + if ms - self.data[name]['stamp'] >= time_cut: + ret.data.pop(name) + + return ret + + def from_dict(self, d) -> None: + self.data = d.copy() + def to_dict(self) -> dict: + return self.data.copy() + + def get_scanners(self) -> set: + return set(self.data.keys()) + + def get(self, scanner: str, measure: str) -> float | None: + if scanner in self.data: + if measure in self.data[scanner]: + return self.data[scanner][measure] + return None + + def dist(self, p#: BermudaPoint # welp, this has felt like pulling teeth, now I'm certain of it... 
+ ): + dist = 0.0 + count = 0 + # if not TOOLS: + # _LOGGER.debug(" self.data %s", self.data) + # _LOGGER.debug(" p.data %s", p.data) + m = 'dist' + for name in self.data.keys(): + if self.data[name] is None or m not in self.data[name] or self.data[name][m] is None: + continue + if name in p.data: + if p.data[name] is None or m not in p.data[name] or p.data[name][m] is None: + continue + # dist += abs(p1[name] - self.data[name]) + # d = p.data[name] - self.data[name] + d = p.data[name][m] - self.data[name][m] + dist += d * d + count += 1 + if count: + dist = math.sqrt(dist) + # return dist / count + return dist + return None + + # out = {} + # for var, val in vars(self).items(): + # if var == "scanners": + # scanout = {} + # for address, scanner in self.scanners.items(): + # scanout[address] = scanner.to_dict() + # # FIXME: val is overwritten + # val = scanout # noqa + # out[var] = val + # return out + + diff --git a/custom_components/bermuda/config_flow.py b/custom_components/bermuda/config_flow.py index e864f9c..9e3a66b 100644 --- a/custom_components/bermuda/config_flow.py +++ b/custom_components/bermuda/config_flow.py @@ -11,6 +11,7 @@ from homeassistant.core import callback from homeassistant.helpers import device_registry as dr from homeassistant.helpers.selector import ( + AreaSelector, DeviceSelector, DeviceSelectorConfig, ObjectSelector, @@ -24,6 +25,7 @@ ADDR_TYPE_IBEACON, ADDR_TYPE_PRIVATE_BLE_DEVICE, BDADDR_TYPE_PRIVATE_RESOLVABLE, + CONF_BMAP, CONF_ATTENUATION, CONF_DEVICES, CONF_DEVTRACK_TIMEOUT, @@ -130,6 +132,8 @@ def __init__(self, config_entry: BermudaConfigEntry) -> None: self.devices: dict[str, BermudaDevice] self._last_ref_power = None self._last_device = None + self._last_area = None + self._last_clear_points = False self._last_scanner = None self._last_attenuation = None self._last_scanner_info = None @@ -188,6 +192,8 @@ async def async_step_init(self, user_input=None): # pylint: disable=unused-argu "selectdevices": "Select Devices", 
"calibration1_global": "Calibration 1: Global", "calibration2_scanners": "Calibration 2: Scanner RSSI Offsets", + "calibration11_map": "Calibration 1: Map Management", + "calibration3_area_map": "Calibration 3: Area Mapping", }, description_placeholders=messages, ) @@ -554,6 +560,139 @@ async def async_step_calibration2_scanners(self, user_input=None): description_placeholders={"suffix": results_str}, ) + async def async_step_calibration11_map(self, user_input=None): + area_points = self.coordinator.bmap._area_points + + if user_input is not None: + if user_input["clear"]: + for name, points in area_points.items(): + area_points[name] = [] + else: + for name, points in area_points.items(): + if "clear " + name in user_input and user_input["clear " + name]: + area_points[name] = [] + + self.coordinator.bmap._area_points = area_points + + if user_input[CONF_SAVE_AND_CLOSE]: + self.coordinator.bmap_save() + self.options.update({ CONF_BMAP: self.coordinator.options[CONF_BMAP] }) + # self.options.update({CONF_BMAP: area_points }) + return await self._update_options() + + data_schema = { + vol.Optional("clear", default = False) : vol.Coerce(bool) + } + for name, points in area_points.items(): + data_schema[vol.Optional("clear " + name, default = False)] = vol.Coerce(bool); + + data_schema[vol.Optional(CONF_SAVE_AND_CLOSE, default=False)] = vol.Coerce(bool); + # self.options.update({CONF_BMAP: area_points }) + + stats = "| Area | Point Count |\n|---|---|" + for name, points in area_points.items(): + stats += f"\n| {name} | {len(points)} |" + + return self.async_show_form( + step_id="calibration11_map", + data_schema=vol.Schema(data_schema), + description_placeholders={"suffix": stats}, + ) + + + async def async_step_calibration3_area_map(self, user_input=None): + CONF_AREA = "area" + CONF_DEVICE = "device" + CONF_CLEAR_POINTS = "clear_points" + + if user_input is not None: + self._last_area = user_input[CONF_AREA] + self._last_device = user_input[CONF_DEVICE] + + 
self._last_clear_points = user_input[CONF_CLEAR_POINTS] + #TODO handle + + self.coordinator.cal = True + self.coordinator.cal_area = user_input[CONF_AREA] + self.coordinator.cal_device = self._get_bermuda_device_from_registry(user_input[CONF_DEVICE]) + self.coordinator.cal_device_addr = self.coordinator.cal_device.address + + # self.options.update({CONF_AREA: user_input[CONF_AREA]}) + # self.options.update({CONF_DEVICE: user_input[CONF_DEVICE]}) + + if self.coordinator.cal: + return await self.async_step_calibration31_calibrating(user_input) + return self.async_show_menu( + step_id="calibration31_calibrating", + menu_options={ + "calibration31_calibrating": "Refresh", + "calibration31_finish": "Finish", + "calibration31_abort": "Abort", + } + ) + + data_schema = { + vol.Required( + CONF_DEVICE, + default=self._last_device if self._last_device is not None else vol.UNDEFINED, + ): DeviceSelector(DeviceSelectorConfig(integration=DOMAIN)), + vol.Required( + CONF_AREA, + default=self._last_area if self._last_area is not None else vol.UNDEFINED, + ): AreaSelector(), + vol.Optional(CONF_CLEAR_POINTS, default=self._last_clear_points): vol.Coerce(bool), + } + + return self.async_show_form( + step_id="calibration3_area_map", + data_schema=vol.Schema(data_schema), + description_placeholders={"suffix": "After you click Submit, point capture will start."}, + ) + + async def async_step_calibration31_calibrating(self, user_input=None): + if not self.coordinator.cal: + return self.async_create_entry(title=NAME)#abort/error? + + #TODO make closing also abort!!! currently capture will be stuck on...!!! 
+ + area_points = self.coordinator.bmap._area_points + + messages = {} + messages["area"] = self.coordinator.cal_area + messages["device"] = self.coordinator.cal_device.prefname + + if self._last_area not in area_points: + messages["count"] = "0" + messages["point"] = "" + else: + messages["count"] = f"{len(area_points[self._last_area])}" + if area_points[self._last_area]: + messages["point"] = f"{area_points[self._last_area][-1].to_dict()}" + else: + messages["point"] = "" + + return self.async_show_menu( + step_id="calibration31_calibrating", + menu_options={ + "calibration31_calibrating": "Refresh", + "calibration31_finish": "Finish", + "calibration31_abort": "Abort", + }, + description_placeholders=messages, + ) + + async def async_step_calibration31_finish(self, user_input=None): + self.coordinator.cal = False + self.coordinator.bmap_save() + self.options.update({ CONF_BMAP: self.coordinator.options[CONF_BMAP] }) + return await self._update_options() + + async def async_step_calibration31_abort(self, user_input=None): + self.coordinator.cal = False + #TODO make this actually work... + return self.async_create_entry(title=NAME, data=self.options) + + def _get_bermuda_device_from_registry(self, registry_id: str) -> BermudaDevice | None: """ Given a device registry device id, return the associated MAC address. diff --git a/custom_components/bermuda/const.py b/custom_components/bermuda/const.py index fb003a9..26113b2 100644 --- a/custom_components/bermuda/const.py +++ b/custom_components/bermuda/const.py @@ -8,8 +8,8 @@ from .log_spam_less import BermudaLogSpamLess -NAME = "Bermuda BLE Trilateration" -DOMAIN = "bermuda" +NAME = "Permuda BLE Mapping" +DOMAIN = "permuda" DOMAIN_DATA = f"{DOMAIN}_data" # Version gets updated by github workflow during release. 
# The version in the repository should always be 0.0.0 to reflect @@ -133,6 +133,8 @@ CONF_SCANNER_INFO = "scanner_info" CONF_RSSI_OFFSETS = "rssi_offsets" +CONF_BMAP = "bmap" + CONF_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL = "update_interval", 10 DOCS[CONF_UPDATE_INTERVAL] = ( "Maximum time between sensor updates in seconds. Smaller intervals", diff --git a/custom_components/bermuda/coordinator.py b/custom_components/bermuda/coordinator.py index 68fe2bb..d5cff65 100644 --- a/custom_components/bermuda/coordinator.py +++ b/custom_components/bermuda/coordinator.py @@ -2,6 +2,8 @@ from __future__ import annotations +import bisect +import math import re from collections.abc import Callable from datetime import datetime, timedelta @@ -53,6 +55,8 @@ from homeassistant.util import slugify from homeassistant.util.dt import get_age, now +from .common.point import BermudaPoint +from .common.map import * from .bermuda_device import BermudaDevice from .const import ( _LOGGER, @@ -62,6 +66,7 @@ BDADDR_TYPE_PRIVATE_RESOLVABLE, BEACON_IBEACON_SOURCE, BEACON_PRIVATE_BLE_SOURCE, + CONF_BMAP, CONF_ATTENUATION, CONF_DEVICES, CONF_DEVTRACK_TIMEOUT, @@ -131,6 +136,12 @@ def __init__( self.platforms = [] self.config_entry = entry + self.cal = False + self.cal_area = None + self.cal_device = None + self.cal_device_addr = None + self.cal_time = 0.0 + self.sensor_interval = entry.options.get(CONF_UPDATE_INTERVAL, DEFAULT_UPDATE_INTERVAL) # match/replacement pairs for redacting addresses @@ -202,6 +213,7 @@ def __init__( self.options[CONF_SMOOTHING_SAMPLES] = DEFAULT_SMOOTHING_SAMPLES self.options[CONF_UPDATE_INTERVAL] = DEFAULT_UPDATE_INTERVAL self.options[CONF_RSSI_OFFSETS] = {} + self.options[CONF_BMAP] = {} if hasattr(entry, "options"): # Firstly, on some calls (specifically during reload after settings changes) @@ -218,9 +230,11 @@ def __init__( CONF_REF_POWER, CONF_SMOOTHING_SAMPLES, CONF_RSSI_OFFSETS, + CONF_BMAP, ): self.options[key] = val + self.bmap_load() self.devices: dict[str, 
BermudaDevice] = {} # self.updaters: dict[str, BermudaPBDUCoordinator] = {} self._has_purged = False @@ -501,17 +515,18 @@ def _get_device(self, address: str) -> BermudaDevice | None: mac = format_mac(address).lower() # format_mac tries to return a lower-cased, colon-separated mac address. # failing that, it returns the original unaltered. - if mac in self.devices: - return self.devices[mac] + if hasattr(self, devices): + if mac in self.devices: + return self.devices[mac] return None def _get_or_create_device(self, address: str) -> BermudaDevice: device = self._get_device(address) if device is None: mac = format_mac(address).lower() - self.devices[mac] = device = BermudaDevice(address=mac, options=self.options) + self.devices[mac] = device = BermudaDevice(address=mac, coordinator=self) device.address = mac - device.unique_id = mac + device.unique_id = DOMAIN + mac return device async def _async_update_data(self): @@ -682,6 +697,17 @@ async def _async_update_data(self): # Recalculate smoothed distances, last_seen etc device.calculate_data() + if self.cal: + beacon = self.cal_device + + # point, stamp = beacon.get_point_fresh() + point = beacon.get_point() + + if self.cal_area not in self.bmap._area_points: + self.bmap._area_points[self.cal_area] = [] + + self.bmap._area_points[self.cal_area].append(point) + self._refresh_areas_by_min_distance() # We might need to freshen deliberately on first start if no new scanners @@ -1086,26 +1112,34 @@ def _refresh_areas_by_min_distance(self): if device.is_scanner is not True: self._refresh_area_by_min_distance(device) + def _refresh_area_by_min_distance(self, device: BermudaDevice): + if not device.create_sensor: + return """Very basic Area setting by finding closest beacon to a given device.""" closest_scanner: BermudaDeviceScanner | None = None - for scanner in device.scanners.values(): - # Check each scanner and keep note of the closest one based on rssi_distance. 
- # Note that rssi_distance is smoothed/filtered, and might be None if the last - # reading was old enough that our algo decides it's "away". - if scanner.rssi_distance is not None and scanner.rssi_distance < self.options.get( - CONF_MAX_RADIUS, DEFAULT_MAX_RADIUS - ): - # It's inside max_radius... - if closest_scanner is None: - # no encumbent, we win! - closest_scanner = scanner - elif closest_scanner.rssi_distance is None or scanner.rssi_distance < closest_scanner.rssi_distance: - # We're closer than the last-closest, we win! - closest_scanner = scanner - - # Apply the newly-found closest scanner (or apply None if we didn't find one) - device.apply_scanner_selection(closest_scanner) + # for scanner in device.scanners.values(): + # # Check each scanner and keep note of the closest one based on rssi_distance. + # # Note that rssi_distance is smoothed/filtered, and might be None if the last + # # reading was old enough that our algo decides it's "away". + # if scanner.rssi_distance is not None and scanner.rssi_distance < self.options.get( + # CONF_MAX_RADIUS, DEFAULT_MAX_RADIUS + # ): + # # It's inside max_radius... + # if closest_scanner is None: + # # no encumbent, we win! + # closest_scanner = scanner + # elif closest_scanner.rssi_distance is None or scanner.rssi_distance < closest_scanner.rssi_distance: + # # We're closer than the last-closest, we win! 
+ # closest_scanner = scanner + + # point, stamp = device.get_point_fresh() + point = device.get_point() + + if not hasattr(device, 'maptrack'): + device.maptrack = BermudaMapTrack(self.bmap) + + device.maptrack.map_point(device, point) def _refresh_scanners(self, scanners: list[BluetoothScannerDevice] | None = None): """ @@ -1401,3 +1435,11 @@ def redact_data(self, data, first_run=True): return [self.redact_data(v, False) for v in data] else: return data + + def bmap_save(self): + _LOGGER.debug("bmap_save") + self.options.update({ CONF_BMAP: self.bmap.to_dict() }) + + def bmap_load(self): + self.bmap = BermudaMap(self.options.get(CONF_BMAP)) + self.bmap.bake() diff --git a/custom_components/bermuda/device_tracker.py b/custom_components/bermuda/device_tracker.py index 54aef5e..b47383b 100644 --- a/custom_components/bermuda/device_tracker.py +++ b/custom_components/bermuda/device_tracker.py @@ -10,7 +10,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.dispatcher import async_dispatcher_connect -from .const import SIGNAL_DEVICE_NEW +from .const import * from .entity import BermudaEntity if TYPE_CHECKING: @@ -69,7 +69,7 @@ class BermudaDeviceTracker(BermudaEntity, BaseTrackerEntity): _attr_should_poll = False _attr_has_entity_name = True - _attr_name = "Bermuda Tracker" + _attr_name = "Permuda Tracker" @property def unique_id(self): diff --git a/custom_components/bermuda/diagnostics.py b/custom_components/bermuda/diagnostics.py index 6a60085..54ea017 100644 --- a/custom_components/bermuda/diagnostics.py +++ b/custom_components/bermuda/diagnostics.py @@ -28,6 +28,8 @@ async def async_get_config_entry_diagnostics(hass: HomeAssistant, entry: Bermuda data: dict[str, Any] = { "active_devices": f"{coordinator.count_active_devices()}/{len(coordinator.devices)}", "active_scanners": f"{coordinator.count_active_scanners()}/{len(coordinator.scanner_list)}", + "options": coordinator.options, + # "bmap": coordinator.bmap.to_dict(), "devices": 
await coordinator.service_dump_devices(call), "bt_manager": coordinator.redact_data(bt_diags), } diff --git a/custom_components/bermuda/entity.py b/custom_components/bermuda/entity.py index 80b2918..a247b61 100644 --- a/custom_components/bermuda/entity.py +++ b/custom_components/bermuda/entity.py @@ -112,7 +112,7 @@ def device_info(self): model = None if self._device.is_scanner: - connection = {(dr.CONNECTION_NETWORK_MAC, self._device.address.lower())} + connection = {(dr.CONNECTION_NETWORK_MAC, DOMAIN + self._device.address.lower())} elif self._device.address_type == ADDR_TYPE_IBEACON: # ibeacon doesn't (yet) actually set a "connection", but # this "matches" what it stores for identifier. @@ -121,7 +121,7 @@ def device_info(self): elif self._device.address_type == ADDR_TYPE_PRIVATE_BLE_DEVICE: # Private BLE Device integration doesn't specify "connection" tuples, # so we use what it defines for the "identifier" instead. - connection = {("private_ble_device", self._device.address.lower())} + connection = {(DOMAIN_PRIVATE_BLE_DEVICE, self._device.address.lower())} # We don't set the model since the Private BLE integration should have # already named it nicely. # model = f"IRK: {self._device.address.lower()[:4]}" @@ -135,6 +135,7 @@ def device_info(self): domain_name = DOMAIN_PRIVATE_BLE_DEVICE else: connection = {(dr.CONNECTION_BLUETOOTH, self._device.address.upper())} + # connection = {(DOMAIN_PRIVATE_BLE_DEVICE, self._device.address.upper())} # No need to set model, since MAC address will be shown via connection. 
# model = f"Bermuda: {self._device.address.lower()}" @@ -201,6 +202,6 @@ def _cached_ratelimit(self, statevalue: Any, interval: int | None = None): def device_info(self): """Implementing this creates an entry in the device registry.""" return { - "identifiers": {(DOMAIN, "BERMUDA_GLOBAL")}, - "name": "Bermuda Global", + "identifiers": {(DOMAIN, "PERMUDA_GLOBAL")}, + "name": "Permuda Global", } diff --git a/custom_components/bermuda/manifest.json b/custom_components/bermuda/manifest.json index 0ecc7d6..983117e 100644 --- a/custom_components/bermuda/manifest.json +++ b/custom_components/bermuda/manifest.json @@ -1,6 +1,6 @@ { - "domain": "bermuda", - "name": "Bermuda BLE Trilateration", + "domain": "permuda", + "name": "Permuda BLE Mapping", "bluetooth": [ { "connectable": false, @@ -14,6 +14,7 @@ "integration_type": "device", "iot_class": "calculated", "issue_tracker": "https://github.com/agittins/bermuda/issues", + "loggers": ["custom_components.permuda"], "requirements": [], "version": "0.0.0" } diff --git a/custom_components/bermuda/sensor.py b/custom_components/bermuda/sensor.py index 95095f3..631cea4 100644 --- a/custom_components/bermuda/sensor.py +++ b/custom_components/bermuda/sensor.py @@ -62,6 +62,12 @@ def device_new(address: str, scanners: list[str]) -> None: for scanner in scanners: entities.append(BermudaSensorScannerRange(coordinator, entry, address, scanner)) entities.append(BermudaSensorScannerRangeRaw(coordinator, entry, address, scanner)) + + #TODO how to add them at runtime? 
+ entities.append(BermudaSensorValue(coordinator, entry, address, 'time_raw')) + for area in coordinator.bmap.areas: + entities.append(BermudaSensorAreaValue(coordinator, entry, address, area, 'value')) + entities.append(BermudaSensorAreaValue(coordinator, entry, address, area, 'value_raw')) # _LOGGER.debug("Sensor received new_device signal for %s", address) # We set update before add to False because we are being # call(back(ed)) from the update, so causing it to call another would be... bad. @@ -92,7 +98,7 @@ def device_new(address: str, scanners: list[str]) -> None: class BermudaSensor(BermudaEntity, SensorEntity): - """bermuda Sensor class.""" + """permuda Sensor class.""" @property def unique_id(self): @@ -130,7 +136,7 @@ def entity_registry_enabled_default(self) -> bool: def device_class(self): """Return de device class of the sensor.""" # There isn't one for "Area Names" so we'll arbitrarily define our own. - return "bermuda__custom_device_class" + return "permuda__custom_device_class" @property def extra_state_attributes(self) -> Mapping[str, Any] | None: @@ -303,7 +309,7 @@ def unique_id(self): @property def name(self): - return f"Unfiltered Distance to {self._scanner.name}" + return f"Raw Distance to {self._scanner.name}" @property def native_value(self): @@ -319,8 +325,104 @@ def native_value(self): return None +class BermudaSensorValue(BermudaSensor):#TODO obviously not meters... + """Create sensors for range to each scanner. 
Extends closest-range class.""" + + def __init__( + self, + coordinator: BermudaDataUpdateCoordinator, + config_entry, + address: str, + valname: str, + ) -> None: + super().__init__(coordinator, config_entry, address) + self.coordinator = coordinator + self.config_entry = config_entry + self._device = coordinator.devices[address] + self.valname = valname + @property + def unique_id(self): + return f"{self._device.unique_id}_{self.valname}" + + @property + def name(self): + return f"{self.valname}" + + @property + def entity_registry_enabled_default(self) -> bool: + return True + + @property + def native_value(self): + if hasattr(self._device, 'maptrack'): + val = getattr(self._device.maptrack, self.valname) + return val + return None + + @property + def device_class(self): + return SensorDeviceClass.MONETARY + + @property + def native_unit_of_measurement(self): + return 'XXX' + + @property + def state_class(self): + return SensorStateClass.MEASUREMENT + +class BermudaSensorAreaValue(BermudaSensor):#TODO obviously not meters... + """Create sensors for range to each scanner. 
Extends closest-range class.""" + + def __init__( + self, + coordinator: BermudaDataUpdateCoordinator, + config_entry, + address: str, + area: str, + valname: str, + ) -> None: + super().__init__(coordinator, config_entry, address) + self.coordinator = coordinator + self.config_entry = config_entry + self._device = coordinator.devices[address] + self._area = area + self.valname = valname + @property + def unique_id(self): + return f"{self._device.unique_id}_{self._area}_{self.valname}" + + @property + def name(self): + return f"{self.valname} for {self._area}" + + @property + def entity_registry_enabled_default(self) -> bool: + return True + + @property + def native_value(self): + if hasattr(self._device, 'maptrack'): + val = getattr(self._device.maptrack, self.valname) + if self._area in val: + return round(val[self._area], 3) + return None + + @property + def device_class(self): + return SensorDeviceClass.MONETARY + + @property + def native_unit_of_measurement(self): + return 'XXX' + + @property + def state_class(self): + return SensorStateClass.MEASUREMENT + + class BermudaGlobalSensor(BermudaGlobalEntity, SensorEntity): - """bermuda Global Sensor class.""" + """permuda Global Sensor class.""" _attr_has_entity_name = True @@ -332,7 +434,7 @@ def name(self): @property def device_class(self): """Return de device class of the sensor.""" - return "bermuda__custom_device_class" + return "permuda__custom_device_class" class BermudaTotalProxyCount(BermudaGlobalSensor): @@ -346,7 +448,7 @@ def unique_id(self): "Uniquely identify this sensor so that it gets stored in the entity_registry, and can be maintained / renamed etc by the user. """ - return "BERMUDA_GLOBAL_PROXY_COUNT" + return "PERMUDA_GLOBAL_PROXY_COUNT" @property def native_value(self) -> int: @@ -370,7 +472,7 @@ def unique_id(self): "Uniquely identify this sensor so that it gets stored in the entity_registry, and can be maintained / renamed etc by the user. 
""" - return "BERMUDA_GLOBAL_ACTIVE_PROXY_COUNT" + return "PERMUDA_GLOBAL_ACTIVE_PROXY_COUNT" @property def native_value(self) -> int: @@ -394,7 +496,7 @@ def unique_id(self): "Uniquely identify this sensor so that it gets stored in the entity_registry, and can be maintained / renamed etc by the user. """ - return "BERMUDA_GLOBAL_DEVICE_COUNT" + return "PERMUDA_GLOBAL_DEVICE_COUNT" @property def native_value(self) -> int: @@ -418,7 +520,7 @@ def unique_id(self): "Uniquely identify this sensor so that it gets stored in the entity_registry, and can be maintained / renamed etc by the user. """ - return "BERMUDA_GLOBAL_VISIBLE_DEVICE_COUNT" + return "PERMUDA_GLOBAL_VISIBLE_DEVICE_COUNT" @property def native_value(self) -> int: diff --git a/custom_components/bermuda/switch.py b/custom_components/bermuda/switch.py index 2c526f3..b0876c2 100644 --- a/custom_components/bermuda/switch.py +++ b/custom_components/bermuda/switch.py @@ -17,7 +17,7 @@ async def async_setup_entry(hass, entry, async_add_devices): class BermudaBinarySwitch(BermudaEntity, SwitchEntity): - """bermuda switch class.""" + """permuda switch class.""" async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument """Turn on the switch.""" diff --git a/custom_components/bermuda/translations/en.json b/custom_components/bermuda/translations/en.json index 2c75244..3744d5e 100644 --- a/custom_components/bermuda/translations/en.json +++ b/custom_components/bermuda/translations/en.json @@ -81,6 +81,57 @@ "data_description": { "scanner_info": "Leave at zero to accept the global default, or enter a non-zero number to offset the rssi reported by that scanner. Adjust until the estimated distance above matches the actual distance between that scanner and the selected transmitting device. Negative values will increase the distance, positive values will decrease it." 
} + }, + "calibration11_map": { + "title": "Calibration 11: Map Management", + "description": "Bla bla:\n{suffix}", + "data": { + "configured_devices": "Device", + "save_and_close": "Save and Close", + "scanner_info": "Per-Scanner RSSI Offsets" + }, + "data_description": { + "scanner_info": "Leave at zero to accept the global default, or enter a non-zero number to offset the rssi reported by that scanner. Adjust until the estimated distance above matches the actual distance between that scanner and the selected transmitting device. Negative values will increase the distance, positive values will decrease it." + } + }, + "calibration3_area_map": { + "title": "Calibration 3: Mapping Areas", + "description": "Bla bla:\n{suffix}", + "data": { + "configured_devices": "Device", + "save_and_close": "Save and Close", + "scanner_info": "Per-Scanner RSSI Offsets" + }, + "data_description": { + "scanner_info": "Leave at zero to accept the global default, or enter a non-zero number to offset the rssi reported by that scanner. Adjust until the estimated distance above matches the actual distance between that scanner and the selected transmitting device. Negative values will increase the distance, positive values will decrease it." 
+ } + }, + "calibration31_calibrating": { + "description": "Device: {device}\nArea: {area}\nPoints: {count}\nLast: {point}" + }, + "calibration31_finish": { + "title": "Calibration 3: finished", + "description": "Bla bla:\n{suffix}", + "data": { + "configured_devices": "Device", + "save_and_close": "Save and Close", + "scanner_info": "Per-Scanner RSSI Offsets" + }, + "data_description": { + "scanner_info": "aoeuoaeu" + } + }, + "calibration31_abort": { + "title": "Calibration 3: abort", + "description": "Bla bla:\n{suffix}", + "data": { + "configured_devices": "Device", + "save_and_close": "Save and Close", + "scanner_info": "Per-Scanner RSSI Offsets" + }, + "data_description": { + "scanner_info": "aoeuoaeu" + } } } }, diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..46aad77 --- /dev/null +++ b/default.nix @@ -0,0 +1,21 @@ +with import <nixpkgs>{}; + +stdenv.mkDerivation { + name = "esphome-build"; + + nativeBuildInputs = [ + platformio + python3Packages.pip + python3Packages.pytest +# python3Packages.json + python3Packages.numpy + python3Packages.pytorch + python3Packages.matplotlib + ]; +} + +# nom-shell +# cd tools +# ./graph.py #to open the example data.json +# #or give a path +# ./graph.py path_to_data.json diff --git a/hacs.json b/hacs.json index d4f50a4..685fc8e 100644 --- a/hacs.json +++ b/hacs.json @@ -1,9 +1,9 @@ { - "name": "Bermuda BLE Trilateration", + "name": "Permuda BLE Mapping", "hacs": "1.6.0", "homeassistant": "2024.6", "render_readme": true, "zip_release": true, - "filename": "bermuda.zip", + "filename": "permuda.zip", "hide_default_branch": true } diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tools/graph.py b/tools/graph.py new file mode 100755 index 0000000..6bee82a --- /dev/null +++ b/tools/graph.py @@ -0,0 +1,368 @@ +#!/usr/bin/env python3 + +import json +import matplotlib.pyplot as plt +import matplotlib.colors as mcolors +import numpy as np + +# from 
matplotlib.widgets import Slider +from matplotlib.widgets import RadioButtons, CheckButtons +# import mpl_interactions.ipyplot as iplt + +import sys, os +# sys.path.insert(1, '../custom_components/bermuda') #ehhh +# sys.path.insert(1, '../') #ehhh +# sys.path.insert(1, '../custom_components') #ehhh +# sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../") +# sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../custom_components") +sys.path.append(os.path.dirname(os.path.realpath(__file__)) + "/../custom_components/bermuda") + +# from ..custom_components.bermuda.map import * +# from ..custom_components.bermuda.point import * +from common.map import * +from common.point import * + +path = 'data.json' +# print ('argument list', sys.argv) +if len(sys.argv) == 2: + path = sys.argv[1] + +with open(path, 'r') as file: + data = json.load(file) + +bmap = BermudaMap(data['data']['options']['bmap']) + +# bmap.bake() + +# print(bmap.baked) + +scanners = list(bmap.scanners) +areas = list(bmap.areas) +area_col = plt.rcParams['axes.prop_cycle'].by_key()['color'] +# print('area_col', area_col) +def get_area_col(area, alpha=0.2): + return (mcolors.to_rgba(area_col[areas.index(area)], alpha)) + +# measures = { 'rssi', 'dist', 'dist_raw' } +measures = [ 'rssi', 'dist', 'dist_raw' ] + +time_cut = 0.2 + +fmeasures = [] + +for m in measures: + defa = -1.0 + if m == 'rssi': + defa = -110 + + fm = m + '_fresh' + fmeasures.append(fm) + + data[m] = {} + data[fm] = {} + for area, points in bmap._area_points.items(): + if points: + data[m][area] = [] + data[fm][area] = [] + for s in scanners: + # data[m][area].append(np.array([d[s][m] for d in points])) + dat = np.ndarray(0) + fdat = [] + for p in points: + val = p.get(s, m) + if val is not None: + dat = np.append(dat, val) + # dat.append(p[s][m]) + else: + dat = np.append(dat, defa) + # dat.append(defa) + + val = p.fresh_cut().get(s, m) + if val is not None: + fdat.append(val) + else: + fdat.append(defa) + + 
data[m][area].append(dat) + # data[m][area].append(np.array(dat)) + data[fm][area].append(np.array(fdat)) + +measures.extend(fmeasures) + +fig, ax = plt.subplot_mosaic( + [ + ['y', 'main', 'mm'], + ['y', 'main', 'area_mesh'], + ['x', 'sec', 'area_pt'], + ['x', 'sec', 'sm'], + # ['t', 't', 't'], + ], + width_ratios=[1, 6, 1], + # height_ratios=[8, 1], + layout='constrained', + sharex = True, + sharey = True, + # subplot_kw={"projection": "3d"}, +) + +al = 0.3 +x = 0 +y = 1 +# mm = measures.index('dist_fresh') +# mm = measures.index('dist_raw_fresh') +mm = measures.index(bmap.metric + '_fresh') +sm = mm +# sm = measures.index('dist_raw_fresh') +st_areas = bmap.areas +mesh_area = areas[0] +mesh_area = 'value' + +def test(mx, my): + coord = [None] * len(bmap.scanners) + if mx >= 0: + coord[x] = mx + elif mx > -1: + coord[x] = 0 + if my >= 0: + coord[y] = my + elif my > -1: + coord[y] = 0 + + if mesh_area == 'detect': + ret = bmap._map_point(tuple(coord)) + # ret = bmap._map_point_nn(tuple(coord)) + if ret is not None: + area = ret['area'] + if area in st_areas: + # return (255, 0, 0, 255) + return get_area_col(area) + return (0.0, 0.0, 0.0, 0.0) + + ret = bmap._point_probs(tuple(coord)) + + area = mesh_area + if mesh_area == 'value': + area = max(ret, key=ret.get) + + value = ret[area] + value = np.clip(value / 100.0, 0.0, 1.0) + return get_area_col(area, value) + + # if ret is None: + # return 0.0 + # elif ret['area'] == mesh_area: + # return 1.0 + # else: + # return -1.0 + +def update_mesh(): + global al, x, y, mm, sm + + # return + axes = ax['main'] + + # mx, my = np.mgrid[-3:3:complex(0, N), -2:2:complex(0, N)] + extent = [-1, 30, -1, 30] + res = np.linspace(-1, 30, 100) + ires = len(res) + + # mx, my = np.meshgrid(res, res) + # print('mx', mx) + # print('my', my) + # mt = np.vectorize(test) + # mz = mt(mx, my) + # mz = np.moveaxis(mz, 0, 2) + # mz = mx + mz = np.full((ires, ires, 4), 0.0) + for iy in range(ires): + for ix in range(ires): + mz[iy, ix] = 
test(res[ix], res[iy]) + + # mz.permute(1, 2, 0) + # print('mz', mz) + # for iy + # axes.contourf(mx, my, mz) + # axes.pcolormesh(mx, my, mz, shading='nearest') + axes.imshow(mz, origin='lower', extent=extent, interpolation=None, aspect='auto') + # plt.draw() + + +def update_ms(): + global al, x, y, mm, sm + + axes = ax['main'] + # plt.xlabel(scanners[x]) + # plt.ylabel(scanners[y]) + axes.clear() + + # for name, dat in data[measures[mm]].items(): + # if name in st_areas: + # axes.scatter(dat[x], dat[y], alpha=al, label=name) + for name in bmap.areas: + dat = data[measures[mm]][name] + a = 0 + if name in st_areas: + a = al + axes.scatter(dat[x], dat[y], alpha=a, label=name) + # axes.plot(dat[x], dat[y], alpha=a, label=name) + + # axes.set_xlabel(scanners[x]) + # axes.set_ylabel(scanners[y]) + axes.legend() + + update_mesh() + + fig.show() + plt.draw() + + +def update_ss(): + global al, x, y, mm, sm + + axes = ax['sec'] + axes.clear() + # for name, dat in data[measures[sm]].items(): + # if name in st_areas: + # axes.scatter(dat[x], dat[y], alpha=al, label=name) + for name in bmap.areas: + dat = data[measures[sm]][name] + # print(name) + + fuu = np.stack((dat[x], dat[y]), axis=-1) + fu = [] + for i in range(len(fuu)): + if fuu[i][0] >= 0 or fuu[i][1] >= 0: + fu.append(fuu[i]) + + values, counts = np.unique(fu, axis = 0, return_inverse=False, return_counts=True) + # print('\nvalues:\n') + # print(values) + # print('\n\ncounts:\n') + # print(counts) + for i in range(len(counts)): + counts[i] *= plt.rcParams['lines.markersize'] ** 2 + a = 0 + if name in st_areas: + a = al + if len(values): + axes.scatter(values[:,0], values[:,1], s=counts, alpha=a, label=name) + # axes.scatter(dat[x], dat[y], alpha=a, label=name) + # axes.plot(dat[x], dat[y], alpha=a, label=name) + + # axes.hist2d(data[measures[mm]]['kitchen'][x], data[measures[mm]]['kitchen'][y]) + + axes.legend() + + +def update(): + update_ss() + update_ms() + + # .canvas.draw_idle() + # fig.draw() + # plt.draw() + # 
fig.show() + +# fig.legend() + +def change_x(label): + global x, y, mm, sm + x = scanners.index(label) + update() + +def change_y(label): + global x, y, mm, sm + y = scanners.index(label) + update() + +def change_mm(label): + global x, y, mm, sm + # mm = label + mm = measures.index(label) + update() + +def change_sm(label): + global x, y, mm, sm + sm = measures.index(label) + update() + +def change_mesh(label): + global x, y, mm, sm, mesh_area + mesh_area = label + update_ms() + +radio_background = 'lightgoldenrodyellow' + +ax['x'].set_facecolor(radio_background) +radiox = RadioButtons(ax['x'], scanners, active = x, + # label_props={'color': 'cmy', 'fontsize': [12, 14, 16]}, + # radio_props={'s': [16, 32, 64]} + ) +radiox.on_clicked(change_x) + +ax['y'].set_facecolor(radio_background) +radioy = RadioButtons(ax['y'], scanners, active = y, + # label_props={'color': 'cmy', 'fontsize': [12, 14, 16]}, + # radio_props={'s': [16, 32, 64]} + ) +radioy.on_clicked(change_y) + +ax['mm'].set_facecolor(radio_background) +radio_mm = RadioButtons(ax['mm'], measures, active = mm) +radio_mm.on_clicked(change_mm) + +ax['sm'].set_facecolor(radio_background) +radio_sm = RadioButtons(ax['sm'], measures, active = sm) +radio_sm.on_clicked(change_sm) + +ax['area_mesh'].set_facecolor(radio_background) +radio_mesh = RadioButtons(ax['area_mesh'], ['detect', 'value'] + areas, active = 1) +radio_mesh.on_clicked(change_mesh) + +ax['area_pt'].set_facecolor(radio_background) +room_check = CheckButtons(ax['area_pt'], areas, tuple(True for _ in range(len(areas)))) + +def change_areas(label): + global x, y, mm, sm, st_areas + st_areas = set(room_check.get_checked_labels()) + update() +room_check.on_clicked(change_areas) + + +tunefig, tuneax = plt.subplots(len(bmap.tune.names)) +# tunefig = fig.add_subfigure(gridspec[:, 0]) +sliders = {} + +idx = 0 +for name in bmap.tune.names: + a = tuneax[idx] + # a = tunefig.add_subplot() + sliders[name] = plt.Slider(a, name, bmap.tune.rmin[name], 
bmap.tune.rmax[name], valinit=getattr(bmap.tune, name)) + idx += 1 + +def tune_update(val): + # global bmap + for name in bmap.tune.names: + setattr(bmap.tune, name, sliders[name].val) + + bmap.bake() + + bmap.tune.print() + + # bmap.calc_stats() + bmap.calc_stats2() + + # bmap.bake_nn() + # bmap.calc_stats_nn() + + update_ms() + +for slider in sliders.values(): + slider.on_changed(tune_update) + +update_ss() +tune_update(0) +# update() + +plt.show() +