diff --git a/validity/api/helpers.py b/validity/api/helpers.py
index 1ae8189..0f09cb7 100644
--- a/validity/api/helpers.py
+++ b/validity/api/helpers.py
@@ -53,11 +53,17 @@ def proxy_factory(
 
 
 class EncryptedDictField(JSONField):
+    def __init__(self, **kwargs):
+        self.do_not_encrypt = kwargs.pop("do_not_encrypt", ())
+        super().__init__(**kwargs)
+
     def to_representation(self, value):
+        if not isinstance(value, EncryptedDict):
+            value = EncryptedDict(value, do_not_encrypt=self.do_not_encrypt)
         return value.encrypted
 
     def to_internal_value(self, data):
-        return EncryptedDict(super().to_internal_value(data))
+        return EncryptedDict(super().to_internal_value(data), do_not_encrypt=self.do_not_encrypt)
 
 
 class ListQPMixin:
@@ -109,10 +115,12 @@ def _validate(self, attrs):
                 ": ".join((field, err[0])) if field != "__all__" else err
                 for field, err in subform.errors.items()
             ]
             raise ValidationError({instance.subform_json_field: errors})
+        instance.subform_json = attrs[instance.subform_json_field] = subform.cleaned_data
+        return attrs
 
     def validate(self, attrs):
         if isinstance(attrs, dict):
-            self._validate(attrs)
+            attrs = self._validate(attrs)
         return attrs
 
diff --git a/validity/api/serializers.py b/validity/api/serializers.py
index a52ff46..b1baa7e 100644
--- a/validity/api/serializers.py
+++ b/validity/api/serializers.py
@@ -377,10 +377,11 @@ def validate(self, data, command_types: Annotated[dict[str, list[str]], "PollerC
 NestedPollerSerializer = nested_factory(PollerSerializer, nb_version=config.netbox_version)
 
 
-class BackupPointSerializer(NetBoxModelSerializer):
+class BackupPointSerializer(SubformValidationMixin, NetBoxModelSerializer):
     url = serializers.HyperlinkedIdentityField(view_name="plugins-api:validity-api:backuppoint-detail")
     data_source = NestedDataSourceSerializer()
-    parameters = EncryptedDictField()
+    upload_url = serializers.CharField(source="url")
+    parameters = EncryptedDictField(do_not_encrypt=models.BackupPoint._meta.get_field("parameters").do_not_encrypt)
 
     class Meta:
         model = models.BackupPoint
@@ -390,11 +391,13 @@ class Meta:
             "display",
             "name",
             "data_source",
-            "backup_after_sync",
+            "enabled",
             "method",
-            "url",
+            "upload_url",
             "ignore_rules",
             "parameters",
+            "last_error",
+            "last_status",
             "last_uploaded",
             "tags",
             "custom_fields",
diff --git a/validity/choices.py b/validity/choices.py
index 9650c21..ddecf22 100644
--- a/validity/choices.py
+++ b/validity/choices.py
@@ -132,3 +132,8 @@ class JSONAPIMethodChoices(TextChoices):
 class BackupMethodChoices(TextChoices, metaclass=ColoredChoiceMeta):
     git = "git", "blue"
     S3 = "S3", "Amazon S3", "yellow"
+
+
+class BackupStatusChoices(TextChoices, metaclass=ColoredChoiceMeta):
+    completed = "completed", "green"
+    failed = "failed", "red"
diff --git a/validity/data_backends.py b/validity/data_backends.py
index e8ad769..340a8d9 100644
--- a/validity/data_backends.py
+++ b/validity/data_backends.py
@@ -11,7 +11,8 @@
 from netbox.config import ConfigItem
 from netbox.data_backends import DataBackend
 
-from validity.models import VDevice
+from validity.models import BackupPoint, VDevice
+from validity.utils.bulk import bulk_backup
 from .pollers.result import DescriptiveError, PollingInfo
 
 
@@ -37,13 +38,18 @@ class PollingBackend(DataBackend):
         .annotate_datasource_id()
         .order_by("poller_id")
     )
+    backup_qs = BackupPoint.objects.filter(enabled=True)
     metainfo_file = Path("polling_info.yaml")
 
+    @property
+    def datasource_id(self):
+        ds_id = self.params.get("datasource_id")
+        assert ds_id, 'Data Source parameters must contain "datasource_id"'
+        return ds_id
+
     def bound_devices_qs(self, device_filter: Q):
-        datasource_id = self.params.get("datasource_id")
-        assert datasource_id, 'Data Source parameters must contain "datasource_id"'
         return (
-            self.devices_qs.filter(data_source_id=datasource_id)
+            self.devices_qs.filter(data_source_id=self.datasource_id)
             .filter(device_filter)
             .set_attribute("prefer_ipv4", ConfigItem("PREFER_IPV4")())
         )
@@ -67,8 +73,12 @@ def start_polling(self, devices) -> tuple[list[Generator], set[DescriptiveError]
             result_generators.append(poller.get_backend().poll(device_group))
         return result_generators, no_poller_errors
 
+    def backup_datasource(self):
+        backup_points = self.backup_qs.filter(data_source__pk=self.datasource_id)
+        bulk_backup(backup_points)
+
     @contextmanager
-    def fetch(self, device_filter: Q | None = None):
+    def fetch(self, device_filter: Q | None = None, do_backup: bool = True):
         with TemporaryDirectory() as dir_name:
             devices = self.bound_devices_qs(device_filter or Q())
             result_generators, errors = self.start_polling(devices)
@@ -79,6 +89,8 @@ def fetch(self, device_filter: Q | None = None, do_backup: bool = True):
             polling_info = PollingInfo(devices_polled=devices.count(), errors=errors, partial_sync=bool(device_filter))
             self.write_metainfo(dir_name, polling_info)
             yield dir_name
+            if do_backup:
+                self.backup_datasource()
 
 
 backends = [PollingBackend]
diff --git a/validity/fields/encrypted.py b/validity/fields/encrypted.py
index fbe0b3e..1d9d658 100644
--- a/validity/fields/encrypted.py
+++ b/validity/fields/encrypted.py
@@ -172,8 +172,8 @@ def formfield(self, **kwargs):
             },
         )
 
-    def value_to_string(self, obj: Any) -> Any:
-        obj = super().value_to_string(obj)
+    def value_from_object(self, obj: Any) -> Any:
+        obj = super().value_from_object(obj)
         if isinstance(obj, EncryptedDict):
             obj = obj.encrypted
         return obj
diff --git a/validity/filtersets.py b/validity/filtersets.py
index 4010739..5398de4 100644
--- a/validity/filtersets.py
+++ b/validity/filtersets.py
@@ -141,9 +141,9 @@ class Meta:
 
 
 class BackupPointFilterSet(SearchMixin, NetBoxModelFilterSet):
-    datasource_id = ModelMultipleChoiceFilter(field_name="data_source", queryset=models.VDataSource.objects.all())
+    data_source_id = ModelMultipleChoiceFilter(field_name="data_source", queryset=models.VDataSource.objects.all())
 
     class Meta:
         model = models.BackupPoint
-        fields = ("id", "name", "method", "datasource_id", "backup_after_sync", "last_uploaded")
+        fields = ("id", "name", "method", "data_source_id", "enabled", "last_uploaded", "last_status")
         search_fields = ("name",)
diff --git a/validity/forms/bulk_import.py b/validity/forms/bulk_import.py
index b9f32e9..00ae348 100644
--- a/validity/forms/bulk_import.py
+++ b/validity/forms/bulk_import.py
@@ -10,15 +10,15 @@
 from validity import choices, di, models
 from validity.api.helpers import SubformValidationMixin
 
-from ..utils.misc import LazyIterator
+from validity.utils.misc import LazyIterator
 from .mixins import PollerCleanMixin
 
 
 class SubFormMixin(SubformValidationMixin):
     def clean(self):
         validated_data = {k: v for k, v in self.cleaned_data.items() if not k.startswith("_")}
-        self.validate(validated_data)
-        return self.cleaned_data
+        attrs = self.validate(validated_data)
+        return self.cleaned_data | attrs
 
 
 class DataSourceMixin(Form):
@@ -209,9 +209,9 @@ class Meta:
         fields = ("name", "connection_type", "commands", "public_credentials", "private_credentials")
 
 
-class BackupPointImportForm(NetBoxModelImportForm):
+class BackupPointImportForm(SubFormMixin, NetBoxModelImportForm):
     data_source = CSVModelChoiceField(
-        queryset=models.VDataSource.objects.all(), to_field_name="name", help_text=_("Data Source")
+        queryset=DataSource.objects.all(), to_field_name="name", help_text=_("Data Source")
     )
     parameters = JSONField(
         help_text=_(
@@ -222,4 +222,4 @@
 
     class Meta:
         model = models.BackupPoint
-        fields = ("name", "data_source", "backup_after_sync", "url", "method", "ignore_rules", "parameters")
+        fields = ("name", "data_source", "enabled", "url", "method", "ignore_rules", "parameters")
diff --git a/validity/forms/filterset.py b/validity/forms/filterset.py
index 61081e9..baa3ea8 100644
--- a/validity/forms/filterset.py
+++ b/validity/forms/filterset.py
@@ -14,6 +14,7 @@
 from validity import di, models
 from validity.choices import (
     BackupMethodChoices,
+    BackupStatusChoices,
     BoolOperationChoices,
     CommandTypeChoices,
     DeviceGroupByChoices,
@@ -197,12 +198,11 @@ class CommandFilterForm(NetBoxModelFilterSetForm):
 class BackupPointFilterForm(NetBoxModelFilterSetForm):
     model = models.BackupPoint
     name = CharField(required=False)
-    datasource_id = DynamicModelMultipleChoiceField(
+    data_source_id = DynamicModelMultipleChoiceField(
         label=_("Data Source"), queryset=DataSource.objects.all(), required=False
     )
-    backup_after_sync = NullBooleanField(
-        label=_("Backup After Sync"), required=False, widget=Select(choices=BOOLEAN_WITH_BLANK_CHOICES)
-    )
+    enabled = NullBooleanField(label=_("Enabled"), required=False, widget=Select(choices=BOOLEAN_WITH_BLANK_CHOICES))
     method = PlaceholderChoiceField(required=False, label=_("Backup Method"), choices=BackupMethodChoices.choices)
+    last_status = PlaceholderChoiceField(required=False, label=_("Last Status"), choices=BackupStatusChoices.choices)
     last_uploaded__lte = DateTimeField(required=False, widget=DateTimePicker(), label=_("Last Uploaded Before"))
     last_uploaded__gte = DateTimeField(required=False, widget=DateTimePicker(), label=_("Last Uploaded After"))
diff --git a/validity/forms/general.py b/validity/forms/general.py
index 9ba8c41..8ea5a05 100644
--- a/validity/forms/general.py
+++ b/validity/forms/general.py
@@ -167,11 +167,11 @@ class Meta:
 
 
 class BackupPointForm(SubformMixin, NetBoxModelForm):
     class Meta:
         model = models.BackupPoint
-        fields = ("name", "data_source", "backup_after_sync", "url", "method", "ignore_rules")
+        fields = ("name", "data_source", "enabled", "url", "method", "ignore_rules", "tags")
         widgets = {"method": HTMXSelect()}
 
     main_fieldsets = [
-        FieldSet("name", "data_source", "backup_after_sync", "url", "method", "ignore_rules", name=_("Backup Point")),
+        FieldSet("name", "data_source", "enabled", "url", "method", "ignore_rules", "tags", name=_("Backup Point")),
     ]
diff --git a/validity/managers.py b/validity/managers.py
index 5225c32..8c18223 100644
--- a/validity/managers.py
+++ b/validity/managers.py
@@ -81,7 +81,7 @@ class VDataFileQS(RestrictedQuerySet):
     pass
 
 
-class VDataSourceQS(CustomPrefetchMixin, RestrictedQuerySet):
+class VDataSourceQS(SetAttributesMixin, CustomPrefetchMixin, RestrictedQuerySet):
     def annotate_config_path(self):
         return self.annotate(device_config_path=KeyTextTransform("device_config_path", "custom_field_data"))
 
@@ -91,6 +91,11 @@ def annotate_command_path(self):
     def annotate_paths(self):
         return self.annotate_config_path().annotate_command_path()
 
+    def prefetch_files(self):
+        from validity.models import VDataFile
+
+        return self.prefetch_related(Prefetch("datafiles", queryset=VDataFile.objects.all()))
+
 
 class ComplianceReportQS(ValiditySettingsMixin, RestrictedQuerySet):
     def annotate_result_stats(self, groupby_field: DeviceGroupByChoices | None = None):
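
Reviewer note (outside the patch): Prefetch() takes the relation lookup as its first argument, so prefetch_files() above passes "datafiles" explicitly; that lookup name is an inference from partial_sync(), which iterates self.datafiles. A minimal sanity check, assuming a configured NetBox shell:

    from validity.models import VDataSource

    for ds in VDataSource.objects.prefetch_files():
        # .all() is served from the prefetch cache, no extra query per object
        print(ds.name, len(ds.datafiles.all()))
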
diff --git a/validity/migrations/0012_backuppoint.py b/validity/migrations/0012_backuppoint.py
index eea4104..d1b2c5e 100644
--- a/validity/migrations/0012_backuppoint.py
+++ b/validity/migrations/0012_backuppoint.py
@@ -1,18 +1,17 @@
-# Generated by Django 4.2.11 on 2024-12-23 23:44
+# Generated by Django 5.0.10 on 2025-01-03 01:07
 
 import django.core.validators
-from django.db import migrations, models
 import django.db.models.deletion
 import taggit.managers
 import utilities.json
 import validity.fields.encrypted
 import validity.models.base
+from django.db import migrations, models
 
 
 class Migration(migrations.Migration):
 
     dependencies = [
-        ('extras', '0107_cachedvalue_extras_cachedvalue_object'),
        ('validity', '0011_delete_scripts'),
     ]
 
@@ -25,13 +24,15 @@ class Migration(migrations.Migration):
                 ('last_updated', models.DateTimeField(auto_now=True, null=True)),
                 ('custom_field_data', models.JSONField(blank=True, default=dict, encoder=utilities.json.CustomFieldJSONEncoder)),
                 ('name', models.CharField(max_length=255, unique=True)),
-                ('backup_after_sync', models.BooleanField()),
+                ('enabled', models.BooleanField()),
                 ('method', models.CharField(max_length=20)),
                 ('url', models.CharField(max_length=255, validators=[django.core.validators.URLValidator(schemes=['http', 'https'])])),
                 ('ignore_rules', models.TextField(blank=True)),
-                ('parameters', validity.fields.encrypted.EncryptedDictField(do_not_encrypt=('username', 'branch', 'aws_access_key_id'))),
+                ('parameters', validity.fields.encrypted.EncryptedDictField(do_not_encrypt=('username', 'branch', 'aws_access_key_id', 'archive'))),
                 ('last_uploaded', models.DateTimeField(blank=True, editable=False, null=True)),
-                ('data_source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='validity.vdatasource')),
+                ('last_status', models.CharField(blank=True, editable=False)),
+                ('last_error', models.CharField(blank=True, editable=False)),
+                ('data_source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='backup_points', to='validity.vdatasource')),
                 ('tags', taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag')),
             ],
             options={
diff --git a/validity/models/backup.py b/validity/models/backup.py
index d2b38fe..dd13012 100644
--- a/validity/models/backup.py
+++ b/validity/models/backup.py
@@ -8,9 +8,10 @@
 from django.utils.translation import gettext_lazy as _
 
 from validity import di
-from validity.choices import BackupMethodChoices
+from validity.choices import BackupMethodChoices, BackupStatusChoices
 from validity.data_backup import BackupBackend
-from validity.fields import EncryptedDictField
+from validity.fields import EncryptedDict, EncryptedDictField
+from validity.integrations.errors import IntegrationError
 from validity.subforms import GitBackupForm, S3BackupForm
 from .base import BaseModel, SubformMixin
 from .data import VDataSource
@@ -18,12 +19,14 @@
 
 class BackupPoint(SubformMixin, BaseModel):
     name = models.CharField(_("Name"), max_length=255, unique=True)
-    data_source = models.ForeignKey(VDataSource, verbose_name=_("Data Source"), on_delete=models.CASCADE)
-    backup_after_sync = models.BooleanField(
-        _("Backup after sync"), help_text=_("Perform a backup every time the linked Data Source is synced")
+    data_source = models.ForeignKey(
+        VDataSource, verbose_name=_("Data Source"), on_delete=models.CASCADE, related_name="backup_points"
+    )
+    enabled = models.BooleanField(
+        _("Enabled"), help_text=_("Perform a backup every time the linked Data Source is synced"), default=True
     )
     method = models.CharField(_("Backup Method"), choices=BackupMethodChoices.choices, max_length=20)
-    # TODO: add link to the docs scpecifying possible URLs
+    # TODO: add link to the docs specifying possible URLs
     url = models.CharField(_("URL"), max_length=255, validators=[URLValidator(schemes=["http", "https"])])
     ignore_rules = models.TextField(
         verbose_name=_("Ignore Rules"),
@@ -34,10 +37,14 @@
         _("Parameters"), do_not_encrypt=("username", "branch", "aws_access_key_id", "archive")
     )
     last_uploaded = models.DateTimeField(_("Last Uploaded"), editable=False, blank=True, null=True)
+    last_status = models.CharField(_("Last Status"), editable=False, blank=True, choices=BackupStatusChoices.choices)
+    last_error = models.CharField(_("Last Error"), editable=False, blank=True)
+    clone_fields = ("data_source", "url", "enabled", "method", "ignore_rules", "parameters")
 
     subform_type_field = "method"
     subform_json_field = "parameters"
     subforms = {"git": GitBackupForm, "S3": S3BackupForm}
+    always_ignore = {"polling_info.yaml"}
 
     class Meta:
         verbose_name = _("Backup Point")
@@ -53,7 +60,7 @@ def __str__(self) -> str:
         return self.name
 
     def clean(self):
-        if self.data_source.type != "device_polling":
+        if hasattr(self, "data_source") and self.data_source.type != "device_polling":
             raise ValidationError(
                 {"data_source": _('Backups are supported for Data Sources with type "Device Polling" only')}
             )
@@ -63,15 +70,32 @@
     def get_method_color(self):
         return BackupMethodChoices.colors.get(self.method)
 
+    def get_last_status_color(self):
+        return BackupStatusChoices.colors.get(self.last_status)
+
+    def serialize_object(self, exclude=None):
+        if not isinstance(self.parameters, EncryptedDict):
+            do_not_encrypt = self._meta.get_field("parameters").do_not_encrypt
+            self.parameters = EncryptedDict(self.parameters, do_not_encrypt=do_not_encrypt)
+        return super().serialize_object(exclude)
+
     def do_backup(self) -> None:
         """
         Perform backup depending on chosen method
-        Raises: IntegrationError
         """
-        self._backup_backend(self)
-        self.last_uploaded = timezone.now()
+        try:
+            self._backup_backend(self)
+            self.last_status = BackupStatusChoices.completed
+            self.last_error = ""
+        except IntegrationError as e:
+            self.last_error = str(e)
+            self.last_status = BackupStatusChoices.failed
+        finally:
+            self.last_uploaded = timezone.now()
 
     def ignore_file(self, path: str) -> bool:
+        if path in self.always_ignore:
+            return True
         for rule in self.ignore_rules.splitlines():
             if fnmatchcase(path, rule):
                 return True
diff --git a/validity/models/data.py b/validity/models/data.py
index 03e2702..29af367 100644
--- a/validity/models/data.py
+++ b/validity/models/data.py
@@ -30,6 +30,10 @@ class VDataSource(DataSource):
     class Meta:
         proxy = True
 
+    def __init__(self, *args, permit_backup: bool = True, **kwargs):
+        self.permit_backup = permit_backup
+        super().__init__(*args, **kwargs)
+
     @property
     def bound_devices(self):
         from validity.models.device import VDevice
@@ -78,7 +82,7 @@ def _sync_status(self):
         DataSource.objects.filter(pk=self.pk).update(status=self.status, last_synced=self.last_synced)
         post_sync.send(sender=self.__class__, instance=self)
 
-    def partial_sync(self, device_filter: Q, batch_size: int = 1000) -> set[str]:
+    def partial_sync(self, device_filter: Q, batch_size: int = 1000, do_backup: bool = True) -> set[str]:
         def update_batch(batch):
             for datafile in self.datafiles.filter(path__in=batch).iterator():
                 if datafile.refresh_from_disk(local_path):
@@ -92,7 +96,8 @@ def new_data_file(path):
             return df
 
         backend = self.get_backend()
-        fetch = backend.fetch(device_filter) if self.type == "device_polling" else backend.fetch()
+        do_backup = self.permit_backup and do_backup
+        fetch = backend.fetch(device_filter, do_backup) if self.type == "device_polling" else backend.fetch()
         with fetch as local_path, self._sync_status():
             all_new_paths = self._walk(local_path)
             updated_paths = set()
@@ -107,14 +112,14 @@ def new_data_file(path):
         logger.debug("%s new files were created and %s existing files were updated during sync", created, updated)
         return all_new_paths
 
-    def sync(self, device_filter: Q | None = None):
+    def sync(self, device_filter: Q | None = None, do_backup: bool = True):
         if not device_filter or self.type != "device_polling":
             return super().sync()
-        self.partial_sync(device_filter)
+        self.partial_sync(device_filter, do_backup)
 
     def sync_in_migration(self, datafile_model: type):
         """
         This method performs sync and avoids problems with historical models which have reference to DataFile
         """
-        new_paths = self.partial_sync(Q())
+        new_paths = self.partial_sync(Q(), do_backup=False)
         datafile_model.objects.exclude(path__in=new_paths).delete()
diff --git a/validity/navigation.py b/validity/navigation.py
index e1b719f..fb76d42 100644
--- a/validity/navigation.py
+++ b/validity/navigation.py
@@ -50,7 +50,7 @@ def model_menu_item(entity, title, buttons=()):
 polling_menu_items = (
     model_menu_item("command", "Commands", [model_add_button, model_import_button]),
     model_menu_item("poller", "Pollers", [model_add_button, model_import_button]),
-    model_menu_item("backuppoint", "Backups", [model_add_button, model_import_button]),
+    model_menu_item("backuppoint", "Backup Points", [model_add_button, model_import_button]),
 )
 
 menu = plugins.PluginMenu(
diff --git a/validity/scripts/runtests/split.py b/validity/scripts/runtests/split.py
index a9aaeb5..190a92c 100644
--- a/validity/scripts/runtests/split.py
+++ b/validity/scripts/runtests/split.py
@@ -6,8 +6,9 @@
 from django.db.models import Q, QuerySet
 
 from validity import di
-from validity.models import ComplianceSelector, VDataSource, VDevice
-from validity.utils.misc import batched, datasource_sync
+from validity.models import BackupPoint, ComplianceSelector, VDataSource, VDevice
+from validity.utils.bulk import bulk_backup, datasource_sync
+from validity.utils.misc import batched
 from ..data_models import FullRunTestsParams, SplitResult
 from ..exceptions import AbortScript
 from ..logger import Logger
@@ -20,8 +21,11 @@ class SplitWorker(TerminateMixin):
     log_factory: Callable[[], Logger] = Logger
     datasource_sync_fn: Callable[[Iterable[VDataSource], Q], None] = datasource_sync
     device_batch_size: int = 2000
-    datasource_queryset: QuerySet[VDataSource] = field(default_factory=VDataSource.objects.all)
+    datasource_queryset: QuerySet[VDataSource] = field(
+        default_factory=VDataSource.objects.set_attribute("permit_backup", False).all
+    )
     device_queryset: QuerySet[VDevice] = field(default_factory=VDevice.objects.all)
+    backup_queryset: QuerySet[BackupPoint] = field(default_factory=BackupPoint.objects.filter(enabled=True).all)
 
     def datasources_to_sync(self, overriding_datasource: int | None, device_filter: Q) -> QuerySet[VDataSource]:
         if overriding_datasource:
@@ -34,7 +38,9 @@ def datasources_to_sync(self, overriding_datasource: int | None, device_filter:
         )
         return self.datasource_queryset.filter(pk__in=datasource_ids)
 
-    def sync_datasources(self, overriding_datasource: int | None, device_filter: Q, logger: Logger):
+    def sync_datasources(
+        self, overriding_datasource: int | None, device_filter: Q, logger: Logger
+    ) -> QuerySet[VDataSource]:
         datasources = self.datasources_to_sync(overriding_datasource, device_filter)
         if datasources.exists():
             self.datasource_sync_fn(datasources, device_filter)
@@ -44,6 +50,14 @@ def sync_datasources(self, overriding_datasource: int | None, device_filter: Q,
             )
         else:
             logger.warning("No bound Data Sources found. Sync skipped")
+        return datasources
+
+    def backup_datasources(self, datasources: QuerySet[VDataSource], logger: Logger) -> None:
+        backup_points = list(self.backup_queryset.filter(data_source__in=datasources))
+        bulk_backup(backup_points)
+        if backup_points:
+            bp_names = ", ".join(bp.name for bp in backup_points)
+            logger.info(f"Data Sources have been backed up to the following Backup Points: {bp_names}")
 
     def _work_slices(
         self, selector_qs: QuerySet[ComplianceSelector], specific_devices: list[int], devices_per_worker: int
@@ -106,6 +120,7 @@ def __call__(self, params: FullRunTestsParams) -> SplitResult:
         logger = self.log_factory()
         device_filter = params.get_device_filter()
         if params.sync_datasources:
-            self.sync_datasources(params.overriding_datasource, device_filter, logger)
+            datasources = self.sync_datasources(params.overriding_datasource, device_filter, logger)
+            self.backup_datasources(datasources, logger)
         slices = self.distribute_work(params, logger, device_filter)
         return SplitResult(log=logger.messages, slices=slices)
diff --git a/validity/tables.py b/validity/tables.py
index 0387b35..8570dcd 100644
--- a/validity/tables.py
+++ b/validity/tables.py
@@ -314,8 +314,9 @@ class BackupPointTable(NetBoxTable):
     name = Column(linkify=True)
     data_source = Column(linkify=True)
     method = ChoiceFieldColumn()
+    last_status = ChoiceFieldColumn()
 
     class Meta(NetBoxTable.Meta):
         model = models.BackupPoint
-        fields = ("name", "method", "backup_after_sync", "data_source", "last_uploaded")
+        fields = ("name", "method", "enabled", "data_source", "last_status", "last_uploaded")
         default_columns = fields
diff --git a/validity/template_content.py b/validity/template_content.py
index 596fa7a..abc4d59 100644
--- a/validity/template_content.py
+++ b/validity/template_content.py
@@ -39,9 +39,7 @@ def right_page(self):
         tenant_qs = Tenant.objects.restrict(self.context["request"].user, "view").filter(
             custom_field_data__data_source=instance.pk
         )
-        if not (qs_count := tenant_qs.count()):
-            return ""
-        related_models = [(qs_count, tenant_qs.model, "cf_data_source")]
+        related_models = [(tenant_qs.count(), tenant_qs.model, "cf_data_source")]
         return self.render(
             "validity/inc/related_objects.html",
             extra_context={"related_models": related_models},
diff --git a/validity/templates/validity/backuppoint.html b/validity/templates/validity/backuppoint.html
index 693ca1a..f6d47ac 100644
--- a/validity/templates/validity/backuppoint.html
+++ b/validity/templates/validity/backuppoint.html
@@ -17,8 +17,8 @@ Backup Point
                 <td>{{ object.data_source | linkify }}</td>
             </tr>
             <tr>
-                <th scope="row">Do backup after Data Source sync</th>
-                <td>{{ object.backup_after_sync | checkmark }}</td>
+                <th scope="row">Enabled</th>
+                <td>{{ object.enabled | checkmark }}</td>
             </tr>
             <tr>
                 <th scope="row">Backup Method</th>
@@ -39,8 +39,14 @@ Backup Point
             </tr>
+            <tr>
+                <th scope="row">Last Status</th>
+                <td>
+                    {{ object.last_status | placeholder }} {% if object.last_error %} | {{ object.last_error }}{% endif %}
+                </td>
+            </tr>
             <tr>
                 <th scope="row">Last Uploaded</th>
-                <td>{{ object.last_uploaded | date:"Y-m-d G:i:s" }}</td>
+                <td>{{ object.last_uploaded | date:"Y-m-d G:i:s" | filler:"Never" }}</td>
             </tr>
         </table>
     </div>
 </div>
diff --git a/validity/templatetags/validity.py b/validity/templatetags/validity.py
index 64417f3..f232a3e 100644
--- a/validity/templatetags/validity.py
+++ b/validity/templatetags/validity.py
@@ -101,3 +101,11 @@ def isodatetime(value, spec="seconds"):
     value = localtime(value) if value.tzinfo else value
     text = f"{value.date().isoformat()} {value.time().isoformat(spec)}"
     return mark_safe(f'{text}')
+
+
+@register.filter
+def filler(value, fill_with="—"):
+    """
+    Like the placeholder filter, but with an arbitrary fill_with value
+    """
+    return value if value else mark_safe(fill_with)
diff --git a/validity/utils/bulk.py b/validity/utils/bulk.py
new file mode 100644
index 0000000..fca2f34
--- /dev/null
+++ b/validity/utils/bulk.py
@@ -0,0 +1,40 @@
+from concurrent.futures import ThreadPoolExecutor
+from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable
+
+from core.exceptions import SyncError
+from django.db.models import Q
+
+
+if TYPE_CHECKING:
+    from validity.models import BackupPoint, VDataSource
+
+
+def datasource_sync(
+    datasources: Iterable["VDataSource"],
+    device_filter: Q | None = None,
+    threads: int = 10,
+    fail_handler: Callable[["VDataSource", Exception], Any] | None = None,
+):
+    """
+    Parallel sync of multiple Data Sources
+    """
+
+    def sync_func(datasource):
+        try:
+            datasource.sync(device_filter)
+        except SyncError as e:
+            if fail_handler:
+                fail_handler(datasource, e)
+            else:
+                raise
+
+    with ThreadPoolExecutor(max_workers=threads) as tp:
+        any(tp.map(sync_func, datasources))
+
+
+def bulk_backup(backup_points: Collection["BackupPoint"], threads: int = 5) -> None:
+    from validity.models import BackupPoint  # deferred to runtime, mirroring the TYPE_CHECKING-only import above
+
+    with ThreadPoolExecutor(max_workers=threads) as tp:
+        any(tp.map(BackupPoint.do_backup, backup_points))
+    BackupPoint.objects.bulk_update(backup_points, fields=["last_uploaded", "last_error", "last_status"])
diff --git a/validity/utils/misc.py b/validity/utils/misc.py
index adf4831..83f10eb 100644
--- a/validity/utils/misc.py
+++ b/validity/utils/misc.py
@@ -1,20 +1,13 @@
 import inspect
-from concurrent.futures import ThreadPoolExecutor
 from contextlib import contextmanager, suppress
 from itertools import chain, islice
 from logging import Logger
-from typing import TYPE_CHECKING, Any, Callable, Collection, Iterable
+from typing import Callable, Collection, Iterable
 
-from core.exceptions import SyncError
-from django.db.models import Q
 from django.utils.functional import Promise
 from netbox.context import current_request
 
 
-if TYPE_CHECKING:
-    from validity.models import VDataSource
-
-
 @contextmanager
 def null_request():
     """
@@ -53,29 +46,6 @@ def reraise(
     raise raise_(*args, **kwargs) from catched_err
 
 
-def datasource_sync(
-    datasources: Iterable["VDataSource"],
-    device_filter: Q | None = None,
-    threads: int = 10,
-    fail_handler: Callable[["VDataSource", Exception], Any] | None = None,
-):
-    """
-    Parrallel sync of multiple Data Sources
-    """
-
-    def sync_func(datasource):
-        try:
-            datasource.sync(device_filter)
-        except SyncError as e:
-            if fail_handler:
-                fail_handler(datasource, e)
-            else:
-                raise
-
-    with ThreadPoolExecutor(max_workers=threads) as tp:
-        any(tp.map(sync_func, datasources))
-
-
 def batched(iterable: Iterable, n: int, container: type = list):
     """
     Batch data into containers of length n. Equal to python3.12 itertools.batched
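
Usage sketch (outside the patch): how the new status tracking is meant to flow. do_backup() no longer raises IntegrationError; it records the outcome on the instance, and bulk_backup() runs the uploads in a thread pool, then persists last_uploaded, last_status and last_error in a single bulk_update(). The data_source pk below is illustrative.

    from validity.models import BackupPoint
    from validity.utils.bulk import bulk_backup

    # enabled Backup Points bound to one Data Source (pk=1 is a stand-in)
    points = list(BackupPoint.objects.filter(enabled=True, data_source__pk=1))

    # failures land in last_status="failed" / last_error instead of propagating
    bulk_backup(points)

    for bp in points:
        print(bp.name, bp.last_status, bp.last_error or "")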