From dde0517f8a1a4135c4e0269c6ac46dba750c3213 Mon Sep 17 00:00:00 2001
From: Anton M
Date: Fri, 10 Jan 2025 19:09:04 +0100
Subject: [PATCH] move backup to post_sync signal

---
 validity/data_backends.py | 12 ++----------
 validity/models/data.py   | 23 ++++++++++++++++-------
 validity/signals.py       | 11 ++++++++++-
 3 files changed, 28 insertions(+), 18 deletions(-)

diff --git a/validity/data_backends.py b/validity/data_backends.py
index abbde4b..01434dd 100644
--- a/validity/data_backends.py
+++ b/validity/data_backends.py
@@ -11,8 +11,7 @@
 from netbox.config import ConfigItem
 from netbox.data_backends import DataBackend
 
-from validity.models import BackupPoint, VDevice
-from validity.utils.bulk import bulk_backup
+from validity.models import VDevice
 
 from .pollers.result import DescriptiveError, PollingInfo
 
@@ -38,7 +37,6 @@ class PollingBackend(DataBackend):
         .annotate_datasource_id()
         .order_by("poller_id")
     )
-    backup_qs = BackupPoint.objects.filter(backup_after_sync=True)
     metainfo_file = Path("polling_info.yaml")
 
     @property
@@ -73,12 +71,8 @@ def start_polling(self, devices) -> tuple[list[Generator], set[DescriptiveError]
             result_generators.append(poller.get_backend().poll(device_group))
         return result_generators, no_poller_errors
 
-    def backup_datasource(self):
-        backup_points = self.backup_qs.filter(data_source__pk=self.datasource_id)
-        bulk_backup(backup_points)
-
     @contextmanager
-    def fetch(self, device_filter: Q | None = None, do_backup: bool = True):
+    def fetch(self, device_filter: Q | None = None):
         with TemporaryDirectory() as dir_name:
             devices = self.bound_devices_qs(device_filter or Q())
             result_generators, errors = self.start_polling(devices)
@@ -89,8 +83,6 @@
             polling_info = PollingInfo(devices_polled=devices.count(), errors=errors, partial_sync=bool(device_filter))
             self.write_metainfo(dir_name, polling_info)
             yield dir_name
-            if do_backup:
-                self.backup_datasource()
 
 
 backends = [PollingBackend]

diff --git a/validity/models/data.py b/validity/models/data.py
index 29af367..c14c679 100644
--- a/validity/models/data.py
+++ b/validity/models/data.py
@@ -82,7 +82,7 @@ def _sync_status(self):
         DataSource.objects.filter(pk=self.pk).update(status=self.status, last_synced=self.last_synced)
         post_sync.send(sender=self.__class__, instance=self)
 
-    def partial_sync(self, device_filter: Q, batch_size: int = 1000, do_backup: bool = True) -> set[str]:
+    def partial_sync(self, device_filter: Q, batch_size: int = 1000) -> set[str]:
         def update_batch(batch):
             for datafile in self.datafiles.filter(path__in=batch).iterator():
                 if datafile.refresh_from_disk(local_path):
@@ -96,8 +96,7 @@ def new_data_file(path):
             return df
 
         backend = self.get_backend()
-        do_backup = self.permit_backup and do_backup
-        fetch = backend.fetch(device_filter, do_backup) if self.type == "device_polling" else backend.fetch()
+        fetch = backend.fetch(device_filter) if self.type == "device_polling" else backend.fetch()
         with fetch as local_path, self._sync_status():
             all_new_paths = self._walk(local_path)
             updated_paths = set()
@@ -112,14 +111,24 @@ def new_data_file(path):
         logger.debug("%s new files were created and %s existing files were updated during sync", created, updated)
         return all_new_paths
 
-    def sync(self, device_filter: Q | None = None, do_backup: bool = True):
+    def sync(self, device_filter: Q | None = None):
         if not device_filter or self.type != "device_polling":
             return super().sync()
-        self.partial_sync(device_filter, do_backup)
+        self.partial_sync(device_filter)
+
+    @contextmanager
+    def _backup_allowed(self, is_allowed: bool):
+        prev_value = self.permit_backup
+        self.permit_backup = is_allowed
+        try:
+            yield
+        finally:
+            self.permit_backup = prev_value
 
     def sync_in_migration(self, datafile_model: type):
         """
         This method performs sync and avoids problems with historical models which have reference to DataFile
         """
-        new_paths = self.partial_sync(Q(), do_backup=False)
-        datafile_model.objects.exclude(path__in=new_paths).delete()
+        with self._backup_allowed(False):
+            new_paths = self.partial_sync(Q())
+            datafile_model.objects.exclude(path__in=new_paths).delete()

diff --git a/validity/signals.py b/validity/signals.py
index d3b38a2..ced98ab 100644
--- a/validity/signals.py
+++ b/validity/signals.py
@@ -1,9 +1,18 @@
+from core.signals import post_sync
 from django.db.models.signals import pre_delete
 from django.dispatch import receiver
 
-from validity.models import ComplianceReport
+from validity.models import BackupPoint, ComplianceReport
+from validity.utils.bulk import bulk_backup
 
 
 @receiver(pre_delete, sender=ComplianceReport)
 def delete_bound_jobs(sender, instance, **kwargs):
     instance.jobs.all().delete()
+
+
+@receiver(post_sync)
+def backup_datasource(sender, instance, **kwargs):
+    if getattr(instance, "permit_backup", True):
+        backup_points = BackupPoint.objects.filter(backup_after_sync=True, data_source=instance)
+        bulk_backup(backup_points)
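
Note for reviewers (not part of the patch): a minimal sketch of the decoupled flow this change produces, assuming a NetBox shell with validity installed. The model import and the fixture name below are assumptions for illustration only.

    # Sketch only: names here are assumptions, not part of the patch.
    from validity.models import VDataSource  # assumed name of the data-source proxy

    ds = VDataSource.objects.get(name="polling-source")  # hypothetical fixture

    # Regular sync: both the full and the partial path end by emitting
    # core.signals.post_sync (partial sync does so via _sync_status()), so the
    # new backup_datasource() receiver runs bulk_backup() for every BackupPoint
    # with backup_after_sync=True bound to this data source.
    ds.sync()

    # Migration-time sync: _backup_allowed(False) flips permit_backup for the
    # duration of partial_sync(), so getattr(instance, "permit_backup", True)
    # is False inside the receiver and the backup step is skipped, e.g.:
    # ds.sync_in_migration(HistoricalDataFile)  # hypothetical datafile model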