diff --git a/client/ayon_core/addon/__init__.py b/client/ayon_core/addon/__init__.py index 6a7ce8a3cb..a8cf51ae25 100644 --- a/client/ayon_core/addon/__init__.py +++ b/client/ayon_core/addon/__init__.py @@ -1,42 +1,38 @@ -# -*- coding: utf-8 -*- +"""Addons for AYON.""" from . import click_wrap +from .base import ( + AddonsManager, + AYONAddon, + ProcessContext, + ProcessPreparationError, + load_addons, +) from .interfaces import ( + IHostAddon, IPluginPaths, - ITrayAddon, + ITraits, ITrayAction, + ITrayAddon, ITrayService, - IHostAddon, -) - -from .base import ( - ProcessPreparationError, - ProcessContext, - AYONAddon, - AddonsManager, - load_addons, ) - from .utils import ( ensure_addons_are_process_context_ready, ensure_addons_are_process_ready, ) - __all__ = ( - "click_wrap", - + "AYONAddon", + "AddonsManager", + "IHostAddon", "IPluginPaths", - "ITrayAddon", + "ITraits", "ITrayAction", + "ITrayAddon", "ITrayService", - "IHostAddon", - - "ProcessPreparationError", "ProcessContext", - "AYONAddon", - "AddonsManager", - "load_addons", - + "ProcessPreparationError", + "click_wrap", "ensure_addons_are_process_context_ready", "ensure_addons_are_process_ready", + "load_addons", ) diff --git a/client/ayon_core/addon/interfaces.py b/client/ayon_core/addon/interfaces.py index 72191e3453..51242e1fc1 100644 --- a/client/ayon_core/addon/interfaces.py +++ b/client/ayon_core/addon/interfaces.py @@ -1,16 +1,26 @@ +"""Addon interfaces for AYON.""" +from __future__ import annotations + from abc import ABCMeta, abstractmethod +from typing import TYPE_CHECKING, Callable, Optional, Type from ayon_core import resources +if TYPE_CHECKING: + from qtpy import QtWidgets + + from ayon_core.pipeline.traits import TraitBase + from ayon_core.tools.tray import TrayManager + class _AYONInterfaceMeta(ABCMeta): - """AYONInterface meta class to print proper string.""" + """AYONInterface metaclass to print proper string.""" - def __str__(self): - return "<'AYONInterface.{}'>".format(self.__name__) + def __str__(cls): + return f"<'AYONInterface.{cls.__name__}'>" - def __repr__(self): - return str(self) + def __repr__(cls): + return str(cls) class AYONInterface(metaclass=_AYONInterfaceMeta): @@ -24,7 +34,7 @@ class AYONInterface(metaclass=_AYONInterfaceMeta): in the interface. By default, interface does not have any abstract parts. """ - pass + log = None class IPluginPaths(AYONInterface): @@ -38,10 +48,25 @@ class IPluginPaths(AYONInterface): """ @abstractmethod - def get_plugin_paths(self): - pass + def get_plugin_paths(self) -> dict[str, list[str]]: + """Return plugin paths for addon. + + Returns: + dict[str, list[str]]: Plugin paths for addon. + + """ + + def _get_plugin_paths_by_type( + self, plugin_type: str) -> list[str]: + """Get plugin paths by type. + + Args: + plugin_type (str): Type of plugin paths to get. + + Returns: + list[str]: List of plugin paths. - def _get_plugin_paths_by_type(self, plugin_type): + """ paths = self.get_plugin_paths() if not paths or plugin_type not in paths: return [] @@ -54,14 +79,18 @@ def _get_plugin_paths_by_type(self, plugin_type): paths = [paths] return paths - def get_launcher_action_paths(self): + def get_launcher_action_paths(self) -> list[str]: """Receive launcher actions paths. Give addons ability to add launcher actions paths. + + Returns: + list[str]: List of launcher action paths. 
+ """ return self._get_plugin_paths_by_type("actions") - def get_create_plugin_paths(self, host_name): + def get_create_plugin_paths(self, host_name: str) -> list[str]: """Receive create plugin paths. Give addons ability to add create plugin paths based on host name. @@ -72,11 +101,14 @@ def get_create_plugin_paths(self, host_name): Args: host_name (str): For which host are the plugins meant. - """ + Returns: + list[str]: List of create plugin paths. + + """ return self._get_plugin_paths_by_type("create") - def get_load_plugin_paths(self, host_name): + def get_load_plugin_paths(self, host_name: str) -> list[str]: """Receive load plugin paths. Give addons ability to add load plugin paths based on host name. @@ -87,11 +119,14 @@ def get_load_plugin_paths(self, host_name): Args: host_name (str): For which host are the plugins meant. - """ + Returns: + list[str]: List of load plugin paths. + + """ return self._get_plugin_paths_by_type("load") - def get_publish_plugin_paths(self, host_name): + def get_publish_plugin_paths(self, host_name: str) -> list[str]: """Receive publish plugin paths. Give addons ability to add publish plugin paths based on host name. @@ -102,11 +137,14 @@ def get_publish_plugin_paths(self, host_name): Args: host_name (str): For which host are the plugins meant. - """ + Returns: + list[str]: List of publish plugin paths. + + """ return self._get_plugin_paths_by_type("publish") - def get_inventory_action_paths(self, host_name): + def get_inventory_action_paths(self, host_name: str) -> list[str]: """Receive inventory action paths. Give addons ability to add inventory action plugin paths. @@ -117,77 +155,83 @@ def get_inventory_action_paths(self, host_name): Args: host_name (str): For which host are the plugins meant. - """ + Returns: + list[str]: List of inventory action plugin paths. + + """ return self._get_plugin_paths_by_type("inventory") class ITrayAddon(AYONInterface): """Addon has special procedures when used in Tray tool. - IMPORTANT: - The addon. still must be usable if is not used in tray even if - would do nothing. - """ + Important: + The addon. still must be usable if is not used in tray even if it + would do nothing. + """ tray_initialized = False - _tray_manager = None + _tray_manager: TrayManager = None _admin_submenu = None @abstractmethod - def tray_init(self): + def tray_init(self) -> None: """Initialization part of tray implementation. Triggered between `initialization` and `connect_with_addons`. This is where GUIs should be loaded or tray specific parts should be - prepared. - """ + prepared - pass + """ @abstractmethod - def tray_menu(self, tray_menu): + def tray_menu(self, tray_menu: QtWidgets.QMenu) -> None: """Add addon's action to tray menu.""" - pass - @abstractmethod - def tray_start(self): + def tray_start(self) -> None: """Start procedure in tray tool.""" - pass - @abstractmethod - def tray_exit(self): + def tray_exit(self) -> None: """Cleanup method which is executed on tray shutdown. This is place where all threads should be shut. + """ - pass + def execute_in_main_thread(self, callback: Callable) -> None: + """Pushes callback to the queue or process 'callback' on a main thread. - def execute_in_main_thread(self, callback): - """ Pushes callback to the queue or process 'callback' on a main thread + Some callbacks need to be processed on main thread (menu actions + must be added on main thread else they won't get triggered etc.) 
- Some callbacks need to be processed on main thread (menu actions - must be added on main thread or they won't get triggered etc.) - """ + Args: + callback (Callable): Function to be executed on main thread + """ if not self.tray_initialized: - # TODO Called without initialized tray, still main thread needed + # TODO (Illicit): Called without initialized tray, still + # main thread needed. try: callback() - except Exception: + except Exception: # noqa: BLE001 self.log.warning( - "Failed to execute {} in main thread".format(callback), - exc_info=True) + "Failed to execute %s callback in main thread", + str(callback), exc_info=True) return - self.manager.tray_manager.execute_in_main_thread(callback) - - def show_tray_message(self, title, message, icon=None, msecs=None): + self._tray_manager.tray_manager.execute_in_main_thread(callback) + + def show_tray_message( + self, + title: str, + message: str, + icon: Optional[QtWidgets.QSystemTrayIcon] = None, + msecs: Optional[int] = None) -> None: """Show tray message. Args: @@ -198,16 +242,22 @@ def show_tray_message(self, title, message, icon=None, msecs=None): msecs (int): Duration of message visibility in milliseconds. Default is 10000 msecs, may differ by Qt version. """ - if self._tray_manager: self._tray_manager.show_tray_message(title, message, icon, msecs) - def add_doubleclick_callback(self, callback): + def add_doubleclick_callback(self, callback: Callable) -> None: + """Add callback to be triggered on tray icon double click.""" if hasattr(self.manager, "add_doubleclick_callback"): self.manager.add_doubleclick_callback(self, callback) @staticmethod - def admin_submenu(tray_menu): + def admin_submenu(tray_menu: QtWidgets.QMenu) -> QtWidgets.QMenu: + """Get or create admin submenu. + + Returns: + QtWidgets.QMenu: Admin submenu. + + """ if ITrayAddon._admin_submenu is None: from qtpy import QtWidgets @@ -217,7 +267,18 @@ def admin_submenu(tray_menu): return ITrayAddon._admin_submenu @staticmethod - def add_action_to_admin_submenu(label, tray_menu): + def add_action_to_admin_submenu( + label: str, tray_menu: QtWidgets.QMenu) -> QtWidgets.QAction: + """Add action to admin submenu. + + Args: + label (str): Label of action. + tray_menu (QtWidgets.QMenu): Tray menu to add action to. 
+ + Returns: + QtWidgets.QAction: Action added to admin submenu + + """ from qtpy import QtWidgets menu = ITrayAddon.admin_submenu(tray_menu) @@ -244,16 +305,15 @@ class ITrayAction(ITrayAddon): @property @abstractmethod - def label(self): + def label(self) -> str: """Service label showed in menu.""" - pass @abstractmethod - def on_action_trigger(self): + def on_action_trigger(self) -> None: """What happens on actions click.""" - pass - def tray_menu(self, tray_menu): + def tray_menu(self, tray_menu: QtWidgets.QMenu) -> None: + """Add action to tray menu.""" from qtpy import QtWidgets if self.admin_action: @@ -265,36 +325,44 @@ def tray_menu(self, tray_menu): action.triggered.connect(self.on_action_trigger) self._action_item = action - def tray_start(self): + def tray_start(self) -> None: # noqa: PLR6301 + """Start procedure in tray tool.""" return - def tray_exit(self): + def tray_exit(self) -> None: # noqa: PLR6301 + """Cleanup method which is executed on tray shutdown.""" return class ITrayService(ITrayAddon): + """Tray service Interface.""" # Module's property - menu_action = None + menu_action: QtWidgets.QAction = None # Class properties - _services_submenu = None - _icon_failed = None - _icon_running = None - _icon_idle = None + _services_submenu: QtWidgets.QMenu = None + _icon_failed: QtWidgets.QIcon = None + _icon_running: QtWidgets.QIcon = None + _icon_idle: QtWidgets.QIcon = None @property @abstractmethod - def label(self): + def label(self) -> str: """Service label showed in menu.""" - pass - # TODO be able to get any sort of information to show/print + # TODO (Illicit): be able to get any sort of information to show/print # @abstractmethod # def get_service_info(self): # pass @staticmethod - def services_submenu(tray_menu): + def services_submenu(tray_menu: QtWidgets.QMenu) -> QtWidgets.QMenu: + """Get or create services submenu. + + Returns: + QtWidgets.QMenu: Services submenu. + + """ if ITrayService._services_submenu is None: from qtpy import QtWidgets @@ -304,13 +372,15 @@ def services_submenu(tray_menu): return ITrayService._services_submenu @staticmethod - def add_service_action(action): + def add_service_action(action: QtWidgets.QAction) -> None: + """Add service action to services submenu.""" ITrayService._services_submenu.addAction(action) if not ITrayService._services_submenu.menuAction().isVisible(): ITrayService._services_submenu.menuAction().setVisible(True) @staticmethod - def _load_service_icons(): + def _load_service_icons() -> None: + """Load service icons.""" from qtpy import QtGui ITrayService._failed_icon = QtGui.QIcon( @@ -324,24 +394,43 @@ def _load_service_icons(): ) @staticmethod - def get_icon_running(): + def get_icon_running() -> QtWidgets.QIcon: + """Get running icon. + + Returns: + QtWidgets.QIcon: Returns "running" icon. + + """ if ITrayService._icon_running is None: ITrayService._load_service_icons() return ITrayService._icon_running @staticmethod - def get_icon_idle(): + def get_icon_idle() -> QtWidgets.QIcon: + """Get idle icon. + + Returns: + QtWidgets.QIcon: Returns "idle" icon. + + """ if ITrayService._icon_idle is None: ITrayService._load_service_icons() return ITrayService._icon_idle @staticmethod - def get_icon_failed(): - if ITrayService._failed_icon is None: + def get_icon_failed() -> QtWidgets.QIcon: + """Get failed icon. + + Returns: + QtWidgets.QIcon: Returns "failed" icon. 
+ + """ + if ITrayService._icon_failed is None: ITrayService._load_service_icons() - return ITrayService._failed_icon + return ITrayService._icon_failed - def tray_menu(self, tray_menu): + def tray_menu(self, tray_menu: QtWidgets.QMenu) -> None: + """Add service to tray menu.""" from qtpy import QtWidgets action = QtWidgets.QAction( @@ -354,21 +443,18 @@ def tray_menu(self, tray_menu): self.set_service_running_icon() - def set_service_running_icon(self): + def set_service_running_icon(self) -> None: """Change icon of an QAction to green circle.""" - if self.menu_action: self.menu_action.setIcon(self.get_icon_running()) - def set_service_failed_icon(self): + def set_service_failed_icon(self) -> None: """Change icon of an QAction to red circle.""" - if self.menu_action: self.menu_action.setIcon(self.get_icon_failed()) - def set_service_idle_icon(self): + def set_service_idle_icon(self) -> None: """Change icon of an QAction to orange circle.""" - if self.menu_action: self.menu_action.setIcon(self.get_icon_idle()) @@ -378,18 +464,29 @@ class IHostAddon(AYONInterface): @property @abstractmethod - def host_name(self): + def host_name(self) -> str: """Name of host which addon represents.""" - pass - - def get_workfile_extensions(self): + def get_workfile_extensions(self) -> list[str]: # noqa: PLR6301 """Define workfile extensions for host. Not all hosts support workfiles thus this is optional implementation. Returns: List[str]: Extensions used for workfiles with dot. - """ + """ return [] + + +class ITraits(AYONInterface): + """Interface for traits.""" + + @abstractmethod + def get_addon_traits(self) -> list[Type[TraitBase]]: + """Get trait classes for the addon. + + Returns: + list[Type[TraitBase]]: Traits for the addon. + + """ diff --git a/client/ayon_core/pipeline/traits/README.md b/client/ayon_core/pipeline/traits/README.md new file mode 100644 index 0000000000..1566b4f353 --- /dev/null +++ b/client/ayon_core/pipeline/traits/README.md @@ -0,0 +1,325 @@ +# Representations and traits + +## Introduction + +The Representation is the lowest level entity, describing the concrete data chunk that +pipeline can act on. It can be specific file or just a set of metadata. Idea is that one +product version can have multiple representations - **Image** product can be jpeg or tiff, both formats are representation of the same source. + +### Brief look into the past (and current state) + +So far, representation was defined as dict-like structure: +```python +{ + "name": "foo", + "ext": "exr", + "files": ["foo_001.exr", "foo_002.exr"], + "stagingDir": "/bar/dir" +} +``` + +This is minimal form, but it can have additional keys like `frameStart`, `fps`, `resolutionWidth`, and more. Thare is also `tags` key that can hold `review`, `thumbnail`, `delete`, `toScanline` and other tag that are controlling the processing. + +This will be *"translated"* to similar structure in database: + +```python +{ + "name": "foo", + "version_id": "...", + "files": [ + { + "id": ..., + "hash": ..., + "name": "foo_001.exr", + "path": "{root[work]}/bar/dir/foo_001.exr", + "size": 1234, + "hash_type": "...", + }, + ... + ], + "attrib": { + "path": "root/bar/dir/foo_001.exr", + "template": "{root[work]}/{project[name]}...", + }, + "data": { + "context": { + "ext": "exr", + "root": {...}, + ... + }, + "active": True + ... + +} +``` + +There are also some assumptions and limitations - like that if `files` in the +representation are list they need to be sequence of files (it can't be a bunch of +unrelated files). 
+
+This system is very flexible in one way, but it lacks a few very important things:
+
+- it is not clearly defined - you can easily add keys, values and tags, but with
+unforeseen consequences
+- it cannot handle "bundles" - multiple files that need to be versioned together and
+belong together
+- it cannot describe important information that you can't get from the file itself, or
+that is very costly to get (like axis orientation and units from Alembic files)
+
+
+### New Representation model
+
+The new representation model aims to solve the points mentioned
+above while also adding some benefits, like consistent IDE hints, typing, built-in
+validators and much more.
+
+### Design
+
+The new representation is "just" a dictionary of traits. A trait can be anything, provided
+it is based on `TraitBase`. It shouldn't really duplicate information that is
+available at the moment of loading (or any other usage) by other means. It should contain
+information that couldn't be determined from the file, or from the AYON context. Some of
+those traits are aligned with [OpenAssetIO Media Creation](https://github.com/OpenAssetIO/OpenAssetIO-MediaCreation) in the hope of maintaining compatibility (it
+should be easy enough to convert between OpenAssetIO Traits and AYON Traits).
+
+#### Details: Representation
+
+`Representation` has methods to deal with adding, removing and getting
+traits. It has all the usual stuff like `get_trait()`, `add_trait()`,
+`remove_trait()`, etc. But it also has plural forms, so you can get/set
+several traits at the same time with `get_traits()` and so on.
+`Representation` also behaves like a dictionary, so you can access/set
+traits in the same way as you would with a dict:
+
+```python
+# import Image trait
+from ayon_core.pipeline.traits import Image, Tagged, Representation
+
+
+# create new representation with name "foo" and add Image trait to it
+rep = Representation(name="foo", traits=[Image()])
+
+# you can add another trait like so
+rep.add_trait(Tagged(tags=["tag"]))
+
+# or you can
+rep[Tagged.id] = Tagged(tags=["tag"])
+
+# and getting them is analogous
+image = rep.get_trait(Image)
+
+# or
+image = rep[Image.id]
+```
+
+> [!NOTE]
+> Traits and their ids - every Trait has its id as a string with a
+> version appended - so **Image** has `ayon.2d.Image.v1`. This id is used in
+> several places (you can see its use above for indexing traits). When querying,
+> you can also omit the version at the end and it will try its best to find
+> the latest possible version. More on that in [Traits]()
+
+You can construct the `Representation` from a dictionary (for example
+serialized as JSON) using `Representation.from_dict()`, or you can
+serialize a `Representation` to a dict for storage with `Representation.traits_as_dict()`.
+
+Every time a representation is created, a new id is generated. You can pass an existing
+id when creating a new representation instance.
+
+##### Equality
+
+Two Representations are equal if:
+- their names are the same
+- their IDs are the same
+- they have the same traits
+- the traits have the same values
+
+##### Validation
+
+Representation has a `validate()` method that will run `validate()` on
+all its traits.
+
+#### Details: Traits
+
+As mentioned, there are several traits defined directly in **ayon-core**.
+They are namespaced to different packages based on their use:
+
+| namespace | trait | description |
+|---|---|---|
+| color | ColorManaged | holds color management information |
+| content | MimeType | uses MIME type (RFC 2046) to describe content (like image/jpeg) |
+| | LocatableContent | describes some location (file or URI) |
+| | FileLocation | path to a file, with size and checksum |
+| | FileLocations | list of `FileLocation` |
+| | RootlessLocation | path where the root is replaced with an AYON root token |
+| | Compressed | describes compression (of a file or otherwise) |
+| | Bundle | list of lists of traits - a compound of inseparable "sub-representations" |
+| | Fragment | compound type marking the representation as part of a larger group of representations |
+| cryptography | DigitallySigned | type trait marking data as digitally signed |
+| | PGPSigned | representation is signed by [PGP](https://www.openpgp.org/) |
+| lifecycle | Transient | marks the representation as temporary - not to be stored |
+| | Persistent | representation should be integrated (stored); opposite of Transient |
+| meta | Tagged | holds a list of tag strings |
+| | TemplatePath | template consisting of tokens/keys and data used to resolve the template into a string |
+| | Variant | used to differentiate between data variants of the same output (mp4 as h.264 and h.265 for example) |
+| | KeepOriginalLocation | marks the representation to keep the original location of the file |
+| | KeepOriginalName | marks the representation to keep the original name of the file |
+| | SourceApplication | holds information about the producing application: its version, variant and platform |
+| | IntendedUse | specifies the intended use of the representation if it cannot be easily determined by other traits |
+| three dimensional | Spatial | spatial information like up axis, units and handedness |
+| | Geometry | type trait to mark the representation as geometry |
+| | Shader | type trait to mark the representation as a shader |
+| | Lighting | type trait to mark the representation as lighting |
+| | IESProfile | states that the representation is an IES profile |
+| time | FrameRanged | contains start and end frame information with in and out |
+| | Handles | defines additional frames at the beginning or end, and whether those frames are inclusive of the range |
+| | Sequence | describes a sequence of frames and how the frames are defined in that sequence |
+| | SMPTETimecode | adds timecode information in SMPTE format |
+| | Static | marks the content as not time-variant |
+| two dimensional | Image | type trait for image data |
+| | PixelBased | defines resolution and pixel aspect for the image data |
+| | Planar | whether pixel data is in planar or packed configuration |
+| | Deep | image encodes deep pixel data |
+| | Overscan | holds overscan/underscan information (pixels added to bottom/sides) |
+| | UDIM | representation is a UDIM tile set |
+
+Traits are Python data classes with optional
+validation and helper methods. If they implement the `TraitBase.validate(Representation)` method, they can validate against all other traits
+in the representation if needed.
+
+> [!NOTE]
+> They could easily be converted to [Pydantic models](https://docs.pydantic.dev/latest/), but since this must run in diverse Python environments inside DCCs, we cannot
+> easily resolve the pydantic-core dependency (as it is a binary written in Rust).
+
+> [!NOTE]
+> Every trait has an id, a name and a human-readable description. Every trait
+> also has a `persistent` property that is set to True by default.
+> This controls whether the trait should be stored with the persistent representation
+> or not. It is useful for traits used only to control the publishing process.
+
+## Examples
+
+Create a simple image representation to be integrated by AYON:
+
+```python
+from pathlib import Path
+from ayon_core.pipeline.traits import (
+    FileLocation,
+    Image,
+    MissingTraitError,
+    PixelBased,
+    Persistent,
+    Representation,
+    Static,
+    TraitValidationError,
+)
+
+rep = Representation(name="reference image", traits=[
+    FileLocation(
+        file_path=Path("/foo/bar/baz.exr"),
+        file_size=1234,
+        file_hash="sha256:...",
+    ),
+    Image(),
+    PixelBased(
+        display_window_width=1920,
+        display_window_height=1080,
+        pixel_aspect_ratio=1.0,
+    ),
+    Persistent(),
+    Static()
+])
+
+# validate the representation
+
+try:
+    rep.validate()
+except TraitValidationError as e:
+    print(f"Representation {rep.name} is invalid: {e}")
+
+```
+
+To work with the resolution of such a representation:
+
+```python
+
+try:
+    width = rep.get_trait(PixelBased).display_window_width
+    # or like this:
+    height = rep[PixelBased.id].display_window_height
+except MissingTraitError:
+    print(f"resolution isn't set on {rep.name}")
+```
+
+Accessing a non-existent trait raises an exception. To test whether a
+representation has a specific trait, use the `.contains_trait()` method.
+
+
+You can also prepare the whole representation data as a dict and
+create the representation from it:
+
+```python
+rep_dict = {
+    "ayon.content.FileLocation.v1": {
+        "file_path": Path("/path/to/file"),
+        "file_size": 1024,
+        "file_hash": None,
+    },
+    "ayon.two_dimensional.Image": {},
+    "ayon.two_dimensional.PixelBased": {
+        "display_window_width": 1920,
+        "display_window_height": 1080,
+        "pixel_aspect_ratio": 1.0,
+    },
+    "ayon.two_dimensional.Planar": {
+        "planar_configuration": "RGB",
+    }
+}
+
+rep = Representation.from_dict("image", rep_dict)
+
+```
+
+
+## Addon specific traits
+
+An addon can define its own traits. To do so, it needs to implement the `ITraits` interface:
+
+```python
+from ayon_core.pipeline.traits import TraitBase
+from ayon_core.addon import (
+    AYONAddon,
+    ITraits,
+)
+
+class MyTraitFoo(TraitBase):
+    id = "myaddon.mytrait.foo.v1"
+    name = "My Trait Foo"
+    description = "This is my trait foo"
+    persistent = True
+
+
+class MyTraitBar(TraitBase):
+    id = "myaddon.mytrait.bar.v1"
+    name = "My Trait Bar"
+    description = "This is my trait bar"
+    persistent = True
+
+
+class MyAddon(AYONAddon, ITraits):
+    def get_addon_traits(self):
+        return [
+            MyTraitFoo,
+            MyTraitBar,
+        ]
+```
+
+## Developer notes
+
+### Pydantic models
+If you want to use the MyPy linter, make sure that
+optional fields typed as `Optional[Type]` set a default value
+using the `default` or `default_factory` parameter. Otherwise MyPy will
+complain about missing named arguments.
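+
+A minimal sketch of a trait that follows this rule (the `MyCustomTrait` name and its
+fields are hypothetical, used here only to illustrate the defaults):
+
+```python
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import ClassVar, List, Optional
+
+from ayon_core.pipeline.traits import TraitBase
+
+
+@dataclass
+class MyCustomTrait(TraitBase):
+    """Hypothetical trait showing defaults on optional fields."""
+
+    id: ClassVar[str] = "myaddon.meta.MyCustomTrait.v1"
+    name: ClassVar[str] = "MyCustomTrait"
+    description: ClassVar[str] = "Example trait with optional fields."
+    persistent: ClassVar[bool] = True
+
+    # required field - no default needed
+    source: str
+    # optional fields get an explicit default so MyPy does not complain
+    # about missing named arguments when the trait is constructed
+    comment: Optional[str] = None
+    # mutable defaults go through default_factory
+    notes: List[str] = field(default_factory=list)
+```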
diff --git a/client/ayon_core/pipeline/traits/__init__.py b/client/ayon_core/pipeline/traits/__init__.py new file mode 100644 index 0000000000..645064d59f --- /dev/null +++ b/client/ayon_core/pipeline/traits/__init__.py @@ -0,0 +1,112 @@ +"""Trait classes for the pipeline.""" +from .color import ColorManaged +from .content import ( + Bundle, + Compressed, + FileLocation, + FileLocations, + Fragment, + LocatableContent, + MimeType, + RootlessLocation, +) +from .cryptography import DigitallySigned, PGPSigned +from .lifecycle import Persistent, Transient +from .meta import ( + IntendedUse, + KeepOriginalLocation, + SourceApplication, + Tagged, + TemplatePath, + Variant, +) +from .representation import Representation +from .temporal import ( + FrameRanged, + GapPolicy, + Handles, + Sequence, + SMPTETimecode, + Static, +) +from .three_dimensional import Geometry, IESProfile, Lighting, Shader, Spatial +from .trait import ( + MissingTraitError, + TraitBase, + TraitValidationError, +) +from .two_dimensional import ( + UDIM, + Deep, + Image, + Overscan, + PixelBased, + Planar, +) +from .utils import ( + get_sequence_from_files, +) + +__all__ = [ # noqa: RUF022 + # base + "Representation", + "TraitBase", + "MissingTraitError", + "TraitValidationError", + + # color + "ColorManaged", + + # content + "Bundle", + "Compressed", + "FileLocation", + "FileLocations", + "Fragment", + "LocatableContent", + "MimeType", + "RootlessLocation", + + # cryptography + "DigitallySigned", + "PGPSigned", + + # life cycle + "Persistent", + "Transient", + + # meta + "IntendedUse", + "KeepOriginalLocation", + "SourceApplication", + "Tagged", + "TemplatePath", + "Variant", + + # temporal + "FrameRanged", + "GapPolicy", + "Handles", + "Sequence", + "SMPTETimecode", + "Static", + + # three-dimensional + "Geometry", + "IESProfile", + "Lighting", + "Shader", + "Spatial", + + # two-dimensional + "Compressed", + "Deep", + "Image", + "Overscan", + "PixelBased", + "Planar", + "UDIM", + + # utils + "get_sequence_from_files", +] diff --git a/client/ayon_core/pipeline/traits/color.py b/client/ayon_core/pipeline/traits/color.py new file mode 100644 index 0000000000..491131c8bc --- /dev/null +++ b/client/ayon_core/pipeline/traits/color.py @@ -0,0 +1,30 @@ +"""Color management related traits.""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import ClassVar, Optional + +from .trait import TraitBase + + +@dataclass +class ColorManaged(TraitBase): + """Color managed trait. + + Holds color management information. Can be used with Image related + traits to define color space and config. + + Sync with OpenAssetIO MediaCreation Traits. + + Attributes: + color_space (str): An OCIO colorspace name available + in the "current" OCIO context. + config (str): An OCIO config name defining color space. + """ + + id: ClassVar[str] = "ayon.color.ColorManaged.v1" + name: ClassVar[str] = "ColorManaged" + color_space: str + description: ClassVar[str] = "Color Managed trait." 
+ persistent: ClassVar[bool] = True + config: Optional[str] = None diff --git a/client/ayon_core/pipeline/traits/content.py b/client/ayon_core/pipeline/traits/content.py new file mode 100644 index 0000000000..9bb43fcdb3 --- /dev/null +++ b/client/ayon_core/pipeline/traits/content.py @@ -0,0 +1,484 @@ +"""Content traits for the pipeline.""" +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass + +# TC003 is there because Path in TYPECHECKING will fail in tests +from pathlib import Path # noqa: TC003 +from typing import ClassVar, Generator, Optional + +from .representation import Representation +from .temporal import FrameRanged, Handles, Sequence +from .trait import ( + MissingTraitError, + TraitBase, + TraitValidationError, +) +from .two_dimensional import UDIM +from .utils import get_sequence_from_files + + +@dataclass +class MimeType(TraitBase): + """MimeType trait model. + + This model represents a mime type trait. For example, image/jpeg. + It is used to describe the type of content in a representation regardless + of the file extension. + + For more information, see RFC 2046 and RFC 4288 (and related RFCs). + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + mime_type (str): Mime type like image/jpeg. + """ + + name: ClassVar[str] = "MimeType" + description: ClassVar[str] = "MimeType Trait Model" + id: ClassVar[str] = "ayon.content.MimeType.v1" + persistent: ClassVar[bool] = True + mime_type: str + + +@dataclass +class LocatableContent(TraitBase): + """LocatableContent trait model. + + This model represents a locatable content trait. Locatable content + is content that has a location. It doesn't have to be a file - it could + be a URL or some other location. + + Sync with OpenAssetIO MediaCreation Traits. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + location (str): Location. + is_templated (Optional[bool]): Is the location templated? Default is None. + """ + + name: ClassVar[str] = "LocatableContent" + description: ClassVar[str] = "LocatableContent Trait Model" + id: ClassVar[str] = "ayon.content.LocatableContent.v1" + persistent: ClassVar[bool] = True + location: str + is_templated: Optional[bool] = None + + +@dataclass +class FileLocation(TraitBase): + """FileLocation trait model. + + This model represents a file path. It is a specialization of the + LocatableContent trait. It is adding optional file size and file hash + for easy access to file information. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + file_path (str): File path. + file_size (Optional[int]): File size in bytes. + file_hash (Optional[str]): File hash. + """ + + name: ClassVar[str] = "FileLocation" + description: ClassVar[str] = "FileLocation Trait Model" + id: ClassVar[str] = "ayon.content.FileLocation.v1" + persistent: ClassVar[bool] = True + file_path: Path + file_size: Optional[int] = None + file_hash: Optional[str] = None + + +@dataclass +class FileLocations(TraitBase): + """FileLocation trait model. + + This model represents a file path. It is a specialization of the + LocatableContent trait. It is adding optional file size and file hash + for easy access to file information. + + Attributes: + name (str): Trait name. + description (str): Trait description. 
+ id (str): id should be namespaced trait name with version + file_paths (list of FileLocation): File locations. + + """ + + name: ClassVar[str] = "FileLocations" + description: ClassVar[str] = "FileLocations Trait Model" + id: ClassVar[str] = "ayon.content.FileLocations.v1" + persistent: ClassVar[bool] = True + file_paths: list[FileLocation] + + def get_files(self) -> Generator[Path, None, None]: + """Get all file paths from the trait. + + This method will return all file paths from the trait. + + Yields: + Path: List of file paths. + + """ + for file_location in self.file_paths: + yield file_location.file_path + + def get_file_location_for_frame( + self, + frame: int, + sequence_trait: Optional[Sequence] = None, + ) -> Optional[FileLocation]: + """Get file location for a frame. + + This method will return the file location for a given frame. If the + frame is not found in the file paths, it will return None. + + Args: + frame (int): Frame to get the file location for. + sequence_trait (Sequence): Sequence trait to get the + frame range specs from. + + Returns: + Optional[FileLocation]: File location for the frame. + + """ + frame_regex = re.compile(r"\.(?P(?P0*)\d+)\.\D+\d?$") + if sequence_trait and sequence_trait.frame_regex: + frame_regex = sequence_trait.get_frame_pattern() + + for location in self.file_paths: + result = re.search(frame_regex, location.file_path.name) + if result: + frame_index = int(result.group("index")) + if frame_index == frame: + return location + return None + + def validate_trait(self, representation: Representation) -> None: + """Validate the trait. + + This method validates the trait against others in the representation. + In particular, it checks that the sequence trait is present and if + so, it will compare the frame range to the file paths. + + Args: + representation (Representation): Representation to validate. + + Raises: + TraitValidationError: If the trait is invalid within the + representation. + + """ + super().validate_trait(representation) + if len(self.file_paths) == 0: + # If there are no file paths, we can't validate + msg = "No file locations defined (empty list)" + raise TraitValidationError(self.name, msg) + if representation.contains_trait(FrameRanged): + self._validate_frame_range(representation) + if not representation.contains_trait(Sequence) \ + and not representation.contains_trait(UDIM): + # we have multiple files, but it is not a sequence + # or UDIM tile set what it it then? If the files are not related + # to each other then this representation is invalid. + msg = ( + "Multiple file locations defined, but no Sequence " + "or UDIM trait defined. If the files are not related to " + "each other, the representation is invalid." + ) + raise TraitValidationError(self.name, msg) + + def _validate_frame_range(self, representation: Representation) -> None: + """Validate the frame range against the file paths. + + If the representation contains a FrameRanged trait, this method will + validate the frame range against the file paths. If the frame range + does not match the file paths, the trait is invalid. It takes into + account the Handles and Sequence traits. + + Args: + representation (Representation): Representation to validate. + + Raises: + TraitValidationError: If the trait is invalid within the + representation. 
+ + """ + tmp_frame_ranged: FrameRanged = get_sequence_from_files( + [f.file_path for f in self.file_paths]) + + frames_from_spec: list[int] = [] + with contextlib.suppress(MissingTraitError): + sequence: Sequence = representation.get_trait(Sequence) + frame_regex = sequence.get_frame_pattern() + if sequence.frame_spec: + frames_from_spec = sequence.get_frame_list( + self, frame_regex) + + frame_start_with_handles, frame_end_with_handles = \ + self._get_frame_info_with_handles(representation, frames_from_spec) + + if frame_start_with_handles \ + and tmp_frame_ranged.frame_start != frame_start_with_handles: + # If the detected frame range does not match the combined + # FrameRanged and Handles trait, the + # trait is invalid. + msg = ( + f"Frame range defined by {self.name} " + f"({tmp_frame_ranged.frame_start}-" + f"{tmp_frame_ranged.frame_end}) " + "in files does not match " + "frame range " + f"({frame_start_with_handles}-" + f"{frame_end_with_handles}) defined in FrameRanged trait." + ) + + raise TraitValidationError(self.name, msg) + + if frames_from_spec: + if len(frames_from_spec) != len(self.file_paths): + # If the number of file paths does not match the frame range, + # the trait is invalid + msg = ( + f"Number of file locations ({len(self.file_paths)}) " + "does not match frame range defined by frame spec " + "on Sequence trait: " + f"({len(frames_from_spec)})" + ) + raise TraitValidationError(self.name, msg) + # if there is frame spec on the Sequence trait + # we should not validate the frame range from the files. + # the rest is validated by Sequence validators. + return + + length_with_handles: int = ( + frame_end_with_handles - frame_start_with_handles + 1 + ) + + if len(self.file_paths) != length_with_handles: + # If the number of file paths does not match the frame range, + # the trait is invalid + msg = ( + f"Number of file locations ({len(self.file_paths)}) " + "does not match frame range " + f"({length_with_handles})" + ) + raise TraitValidationError(self.name, msg) + + frame_ranged: FrameRanged = representation.get_trait(FrameRanged) + + if frame_start_with_handles != tmp_frame_ranged.frame_start or \ + frame_end_with_handles != tmp_frame_ranged.frame_end: + # If the frame range does not match the FrameRanged trait, the + # trait is invalid. Note that we don't check the frame rate + # because it is not stored in the file paths and is not + # determined by `get_sequence_from_files`. + msg = ( + "Frame range " + f"({frame_ranged.frame_start}-{frame_ranged.frame_end}) " + "in sequence trait does not match " + "frame range " + f"({tmp_frame_ranged.frame_start}-" + f"{tmp_frame_ranged.frame_end}) " + ) + raise TraitValidationError(self.name, msg) + + @staticmethod + def _get_frame_info_with_handles( + representation: Representation, + frames_from_spec: list[int]) -> tuple[int, int]: + """Get the frame range with handles from the representation. + + This will return frame start and frame end with handles calculated + in if there actually is the Handles trait in the representation. + + Args: + representation (Representation): Representation to get the frame + range from. + frames_from_spec (list[int]): List of frames from the frame spec. + This list is modified in place to take into + account the handles. + + Mutates: + frames_from_spec: List of frames from the frame spec. + + Returns: + tuple[int, int]: Start and end frame with handles. 
+ + """ + frame_start = frame_end = 0 + frame_start_handle = frame_end_handle = 0 + # If there is no sequence trait, we can't validate it + if frames_from_spec and representation.contains_trait(FrameRanged): + # if there is no FrameRanged trait (but really there should be) + # we can use the frame range from the frame spec + frame_start = min(frames_from_spec) + frame_end = max(frames_from_spec) + + # Handle the frame range + with contextlib.suppress(MissingTraitError): + frame_start = representation.get_trait(FrameRanged).frame_start + frame_end = representation.get_trait(FrameRanged).frame_end + + # Handle the handles :P + with contextlib.suppress(MissingTraitError): + handles: Handles = representation.get_trait(Handles) + if not handles.inclusive: + # if handless are exclusive, we need to adjust the frame range + frame_start_handle = handles.frame_start_handle or 0 + frame_end_handle = handles.frame_end_handle or 0 + if frames_from_spec: + frames_from_spec.extend( + range(frame_start - frame_start_handle, frame_start) + ) + frames_from_spec.extend( + range(frame_end + 1, frame_end_handle + frame_end + 1) + ) + + frame_start_with_handles = frame_start - frame_start_handle + frame_end_with_handles = frame_end + frame_end_handle + + return frame_start_with_handles, frame_end_with_handles + + +@dataclass +class RootlessLocation(TraitBase): + """RootlessLocation trait model. + + RootlessLocation trait is a trait that represents a file path that is + without specific root. To obtain absolute path, the root needs to be + resolved by AYON. Rootless path can be used on multiple platforms. + + Example:: + + RootlessLocation( + rootless_path="{root[work]}/project/asset/asset.jpg" + ) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + rootless_path (str): Rootless path. + """ + + name: ClassVar[str] = "RootlessLocation" + description: ClassVar[str] = "RootlessLocation Trait Model" + id: ClassVar[str] = "ayon.content.RootlessLocation.v1" + persistent: ClassVar[bool] = True + rootless_path: str + + +@dataclass +class Compressed(TraitBase): + """Compressed trait model. + + This trait can hold information about compressed content. What type + of compression is used. + + Example:: + + Compressed("gzip") + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + compression_type (str): Compression type. + """ + + name: ClassVar[str] = "Compressed" + description: ClassVar[str] = "Compressed Trait" + id: ClassVar[str] = "ayon.content.Compressed.v1" + persistent: ClassVar[bool] = True + compression_type: str + + +@dataclass +class Bundle(TraitBase): + """Bundle trait model. + + This model list of independent Representation traits + that are bundled together. This is useful for representing + a collection of sub-entities that are part of a single + entity. You can easily reconstruct representations from + the bundle. + + Example:: + + Bundle( + items=[ + [ + MimeType(mime_type="image/jpeg"), + FileLocation(file_path="/path/to/file.jpg") + ], + [ + + MimeType(mime_type="image/png"), + FileLocation(file_path="/path/to/file.png") + ] + ] + ) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + items (list[list[TraitBase]]): List of representations. 
+ """ + + name: ClassVar[str] = "Bundle" + description: ClassVar[str] = "Bundle Trait" + id: ClassVar[str] = "ayon.content.Bundle.v1" + persistent: ClassVar[bool] = True + items: list[list[TraitBase]] + + def to_representations(self) -> Generator[Representation]: + """Convert bundle to representations. + + Yields: + Representation: Representation of the bundle. + + """ + for idx, item in enumerate(self.items): + yield Representation(name=f"{self.name} {idx}", traits=item) + + +@dataclass +class Fragment(TraitBase): + """Fragment trait model. + + This model represents a fragment trait. A fragment is a part of + a larger entity that is represented by another representation. + + Example:: + + main_representation = Representation(name="parent", + traits=[], + ) + fragment_representation = Representation( + name="fragment", + traits=[ + Fragment(parent=main_representation.id), + ] + ) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + parent (str): Parent representation id. + """ + + name: ClassVar[str] = "Fragment" + description: ClassVar[str] = "Fragment Trait" + id: ClassVar[str] = "ayon.content.Fragment.v1" + persistent: ClassVar[bool] = True + parent: str diff --git a/client/ayon_core/pipeline/traits/cryptography.py b/client/ayon_core/pipeline/traits/cryptography.py new file mode 100644 index 0000000000..d9445bd543 --- /dev/null +++ b/client/ayon_core/pipeline/traits/cryptography.py @@ -0,0 +1,41 @@ +"""Cryptography traits.""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import ClassVar, Optional + +from .trait import TraitBase + + +@dataclass +class DigitallySigned(TraitBase): + """Digitally signed trait. + + This type trait means that the data is digitally signed. + + Attributes: + signature (str): Digital signature. + """ + + id: ClassVar[str] = "ayon.cryptography.DigitallySigned.v1" + name: ClassVar[str] = "DigitallySigned" + description: ClassVar[str] = "Digitally signed trait." + persistent: ClassVar[bool] = True + +@dataclass +class PGPSigned(DigitallySigned): + """PGP signed trait. + + This trait holds PGP (RFC-4880) signed data. + + Attributes: + signed_data (str): Signed data. + clear_text (str): Clear text. + """ + + id: ClassVar[str] = "ayon.cryptography.PGPSigned.v1" + name: ClassVar[str] = "PGPSigned" + description: ClassVar[str] = "PGP signed trait." + persistent: ClassVar[bool] = True + signed_data: str + clear_text: Optional[str] = None diff --git a/client/ayon_core/pipeline/traits/lifecycle.py b/client/ayon_core/pipeline/traits/lifecycle.py new file mode 100644 index 0000000000..b1b72f8fcb --- /dev/null +++ b/client/ayon_core/pipeline/traits/lifecycle.py @@ -0,0 +1,77 @@ +"""Lifecycle traits.""" +from dataclasses import dataclass +from typing import ClassVar + +from .trait import TraitBase, TraitValidationError + + +@dataclass +class Transient(TraitBase): + """Transient trait model. + + Transient trait marks representation as transient. Such representations + are not persisted in the system. + + Attributes: + name (str): Trait name. + description (str): Trait description. 
+ id (str): id should be namespaced trait name with version + """ + + name: ClassVar[str] = "Transient" + description: ClassVar[str] = "Transient Trait Model" + id: ClassVar[str] = "ayon.lifecycle.Transient.v1" + persistent: ClassVar[bool] = True # see note in Persistent + + def validate_trait(self, representation) -> None: # noqa: ANN001 + """Validate representation is not Persistent. + + Args: + representation (Representation): Representation model. + + Raises: + TraitValidationError: If representation is marked as both + Persistent and Transient. + + """ + if representation.contains_trait(Persistent): + msg = "Representation is marked as both Persistent and Transient." + raise TraitValidationError(self.name, msg) + + +@dataclass +class Persistent(TraitBase): + """Persistent trait model. + + Persistent trait is opposite to transient trait. It marks representation + as persistent. Such representations are persisted in the system (e.g. in + the database). + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + """ + + name: ClassVar[str] = "Persistent" + description: ClassVar[str] = "Persistent Trait Model" + id: ClassVar[str] = "ayon.lifecycle.Persistent.v1" + # note that this affects persistence of the trait itself, not + # the representation. This is a class variable, so it is shared + # among all instances of the class. + persistent: bool = True + + def validate_trait(self, representation) -> None: # noqa: ANN001 + """Validate representation is not Transient. + + Args: + representation (Representation): Representation model. + + Raises: + TraitValidationError: If representation is marked + as both Persistent and Transient. + + """ + if representation.contains_trait(Transient): + msg = "Representation is marked as both Persistent and Transient." + raise TraitValidationError(self.name, msg) diff --git a/client/ayon_core/pipeline/traits/meta.py b/client/ayon_core/pipeline/traits/meta.py new file mode 100644 index 0000000000..3bf4a87a0b --- /dev/null +++ b/client/ayon_core/pipeline/traits/meta.py @@ -0,0 +1,162 @@ +"""Metadata traits.""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import ClassVar, List, Optional + +from .trait import TraitBase + + +@dataclass +class Tagged(TraitBase): + """Tagged trait model. + + This trait can hold list of tags. + + Example:: + + Tagged(tags=["tag1", "tag2"]) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + tags (List[str]): Tags. + """ + + name: ClassVar[str] = "Tagged" + description: ClassVar[str] = "Tagged Trait Model" + id: ClassVar[str] = "ayon.meta.Tagged.v1" + persistent: ClassVar[bool] = True + tags: List[str] + + +@dataclass +class TemplatePath(TraitBase): + """TemplatePath trait model. + + This model represents a template path with formatting data. + Template path can be Anatomy template and data is used to format it. + + Example:: + + TemplatePath(template="path/{key}/file", data={"key": "to"}) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + template (str): Template path. + data (dict[str]): Formatting data. 
+ """ + + name: ClassVar[str] = "TemplatePath" + description: ClassVar[str] = "Template Path Trait Model" + id: ClassVar[str] = "ayon.meta.TemplatePath.v1" + persistent: ClassVar[bool] = True + template: str + data: dict + + +@dataclass +class Variant(TraitBase): + """Variant trait model. + + This model represents a variant of the representation. + + Example:: + + Variant(variant="high") + Variant(variant="prores444) + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + variant (str): Variant name. + """ + + name: ClassVar[str] = "Variant" + description: ClassVar[str] = "Variant Trait Model" + id: ClassVar[str] = "ayon.meta.Variant.v1" + persistent: ClassVar[bool] = True + variant: str + + +@dataclass +class KeepOriginalLocation(TraitBase): + """Keep files in its original location. + + Note: + This is not a persistent trait. + + """ + name: ClassVar[str] = "KeepOriginalLocation" + description: ClassVar[str] = "Keep Original Location Trait Model" + id: ClassVar[str] = "ayon.meta.KeepOriginalLocation.v1" + persistent: ClassVar[bool] = False + + +@dataclass +class KeepOriginalName(TraitBase): + """Keep files in its original name. + + Note: + This is not a persistent trait. + """ + + name: ClassVar[str] = "KeepOriginalName" + description: ClassVar[str] = "Keep Original Name Trait Model" + id: ClassVar[str] = "ayon.meta.KeepOriginalName.v1" + persistent: ClassVar[bool] = False + + +@dataclass +class SourceApplication(TraitBase): + """Metadata about the source (producing) application. + + This can be useful in cases, where this information is + needed but it cannot be determined from other means - like + .txt files used for various motion tracking applications that + must be interpreted by the loader. + + Note that this is not really connected to any logic in + ayon-applications addon. + + Attributes: + application (str): Application name. + variant (str): Application variant. + version (str): Application version. + platform (str): Platform name (Windows, darwin, etc.). + host_name (str): AYON host name if applicable. + """ + + name: ClassVar[str] = "SourceApplication" + description: ClassVar[str] = "Source Application Trait Model" + id: ClassVar[str] = "ayon.meta.SourceApplication.v1" + persistent: ClassVar[bool] = True + application: str + variant: Optional[str] = None + version: Optional[str] = None + platform: Optional[str] = None + host_name: Optional[str] = None + + +@dataclass +class IntendedUse(TraitBase): + """Intended use of the representation. + + This trait describes the intended use of the representation. It + can be used in cases, where the other traits are not enough to + describe the intended use. For example txt file with tracking + points can be used as corner pin in After Effect but not in Nuke. + + Attributes: + use (str): Intended use description. 
+ + """ + name: ClassVar[str] = "IntendedUse" + description: ClassVar[str] = "Intended Use Trait Model" + id: ClassVar[str] = "ayon.meta.IntendedUse.v1" + persistent: ClassVar[bool] = True + use: str diff --git a/client/ayon_core/pipeline/traits/representation.py b/client/ayon_core/pipeline/traits/representation.py new file mode 100644 index 0000000000..43b0397597 --- /dev/null +++ b/client/ayon_core/pipeline/traits/representation.py @@ -0,0 +1,712 @@ +"""Defines the base trait model and representation.""" +from __future__ import annotations + +import contextlib +import inspect +import re +import sys +import uuid +from functools import lru_cache +from types import GenericAlias +from typing import ( + ClassVar, + Generic, + ItemsView, + Optional, + Type, + TypeVar, + Union, +) + +from .trait import ( + IncompatibleTraitVersionError, + LooseMatchingTraitError, + MissingTraitError, + TraitBase, + TraitValidationError, + UpgradableTraitError, +) + +T = TypeVar("T", bound="TraitBase") + + +def _get_version_from_id(_id: str) -> Optional[int]: + """Get version from ID. + + Args: + _id (str): ID. + + Returns: + int: Version. + + """ + match = re.search(r"v(\d+)$", _id) + return int(match[1]) if match else None + + +class Representation(Generic[T]): # noqa: PLR0904 + """Representation of products. + + Representation defines collection of individual properties that describe + the specific "form" of the product. Each property is represented by a + trait therefore the Representation is a collection of traits. + + It holds methods to add, remove, get, and check for the existence of a + trait in the representation. It also provides a method to get all the + + Note: + `PLR0904` is rule for checking number of public methods in a class. + + Arguments: + name (str): Representation name. Must be unique within instance. + representation_id (str): Representation ID. + """ + + _data: dict[str, T] + _module_blacklist: ClassVar[list[str]] = [ + "_", "builtins", "pydantic", + ] + name: str + representation_id: str + + def __hash__(self): + """Return hash of the representation ID.""" + return hash(self.representation_id) + + def __getitem__(self, key: str) -> T: + """Get the trait by ID. + + Args: + key (str): Trait ID. + + Returns: + TraitBase: Trait instance. + + """ + return self.get_trait_by_id(key) + + def __setitem__(self, key: str, value: T) -> None: + """Set the trait by ID. + + Args: + key (str): Trait ID. + value (TraitBase): Trait instance. + + """ + with contextlib.suppress(KeyError): + self._data.pop(key) + + self.add_trait(value) + + def __delitem__(self, key: str) -> None: + """Remove the trait by ID. + + Args: + key (str): Trait ID. + + + """ + self.remove_trait_by_id(key) + + def __contains__(self, key: str) -> bool: + """Check if the trait exists by ID. + + Args: + key (str): Trait ID. + + Returns: + bool: True if the trait exists, False otherwise. + + """ + return self.contains_trait_by_id(key) + + def __iter__(self): + """Return the trait ID iterator.""" + return iter(self._data) + + def __str__(self): + """Return the representation name.""" + return self.name + + def items(self) -> ItemsView[str, T]: + """Return the traits as items.""" + return ItemsView(self._data) + + def add_trait(self, trait: T, *, exists_ok: bool = False) -> None: + """Add a trait to the Representation. + + Args: + trait (TraitBase): Trait to add. + exists_ok (bool, optional): If True, do not raise an error if the + trait already exists. Defaults to False. 
+ + Raises: + ValueError: If the trait ID is not provided or the trait already + exists. + + """ + if not hasattr(trait, "id"): + error_msg = f"Invalid trait {trait} - ID is required." + raise ValueError(error_msg) + if trait.id in self._data and not exists_ok: + error_msg = f"Trait with ID {trait.id} already exists." + raise ValueError(error_msg) + self._data[trait.id] = trait + + def add_traits( + self, traits: list[T], *, exists_ok: bool = False) -> None: + """Add a list of traits to the Representation. + + Args: + traits (list[TraitBase]): List of traits to add. + exists_ok (bool, optional): If True, do not raise an error if the + trait already exists. Defaults to False. + + """ + for trait in traits: + self.add_trait(trait, exists_ok=exists_ok) + + def remove_trait(self, trait: Type[TraitBase]) -> None: + """Remove a trait from the data. + + Args: + trait (TraitBase, optional): Trait class. + + Raises: + ValueError: If the trait is not found. + + """ + try: + self._data.pop(str(trait.id)) + except KeyError as e: + error_msg = f"Trait with ID {trait.id} not found." + raise ValueError(error_msg) from e + + def remove_trait_by_id(self, trait_id: str) -> None: + """Remove a trait from the data by its ID. + + Args: + trait_id (str): Trait ID. + + Raises: + ValueError: If the trait is not found. + + """ + try: + self._data.pop(trait_id) + except KeyError as e: + error_msg = f"Trait with ID {trait_id} not found." + raise ValueError(error_msg) from e + + def remove_traits(self, traits: list[Type[T]]) -> None: + """Remove a list of traits from the Representation. + + If no trait IDs or traits are provided, all traits will be removed. + + Args: + traits (list[TraitBase]): List of trait classes. + + """ + if not traits: + self._data = {} + return + + for trait in traits: + self.remove_trait(trait) + + def remove_traits_by_id(self, trait_ids: list[str]) -> None: + """Remove a list of traits from the Representation by their ID. + + If no trait IDs or traits are provided, all traits will be removed. + + Args: + trait_ids (list[str], optional): List of trait IDs. + + """ + for trait_id in trait_ids: + self.remove_trait_by_id(trait_id) + + def has_traits(self) -> bool: + """Check if the Representation has any traits. + + Returns: + bool: True if the Representation has any traits, False otherwise. + + """ + return bool(self._data) + + def contains_trait(self, trait: Type[T]) -> bool: + """Check if the trait exists in the Representation. + + Args: + trait (TraitBase): Trait class. + + Returns: + bool: True if the trait exists, False otherwise. + + """ + return bool(self._data.get(str(trait.id))) + + def contains_trait_by_id(self, trait_id: str) -> bool: + """Check if the trait exists using trait id. + + Args: + trait_id (str): Trait ID. + + Returns: + bool: True if the trait exists, False otherwise. + + """ + return bool(self._data.get(trait_id)) + + def contains_traits(self, traits: list[Type[T]]) -> bool: + """Check if the traits exist. + + Args: + traits (list[TraitBase], optional): List of trait classes. + + Returns: + bool: True if all traits exist, False otherwise. + + """ + return all(self.contains_trait(trait=trait) for trait in traits) + + def contains_traits_by_id(self, trait_ids: list[str]) -> bool: + """Check if the traits exist by id. + + If no trait IDs or traits are provided, it will check if the + representation has any traits. + + Args: + trait_ids (list[str]): List of trait IDs. + + Returns: + bool: True if all traits exist, False otherwise. 
+ + """ + return all( + self.contains_trait_by_id(trait_id) for trait_id in trait_ids + ) + + def get_trait(self, trait: Type[T]) -> T: + """Get a trait from the representation. + + Args: + trait (TraitBase, optional): Trait class. + + Returns: + TraitBase: Trait instance. + + Raises: + MissingTraitError: If the trait is not found. + + """ + try: + return self._data[str(trait.id)] + except KeyError as e: + msg = f"Trait with ID {trait.id} not found." + raise MissingTraitError(msg) from e + + def get_trait_by_id(self, trait_id: str) -> T: + # sourcery skip: use-named-expression + """Get a trait from the representation by id. + + Args: + trait_id (str): Trait ID. + + Returns: + TraitBase: Trait instance. + + Raises: + MissingTraitError: If the trait is not found. + + """ + version = _get_version_from_id(trait_id) + if version: + try: + return self._data[trait_id] + except KeyError as e: + msg = f"Trait with ID {trait_id} not found." + raise MissingTraitError(msg) from e + + result = next( + ( + self._data.get(trait_id) + for trait_id in self._data + if trait_id.startswith(trait_id) + ), + None, + ) + if result is None: + msg = f"Trait with ID {trait_id} not found." + raise MissingTraitError(msg) + return result + + def get_traits(self, + traits: Optional[list[Type[T]]] = None + ) -> dict[str, T]: + """Get a list of traits from the representation. + + If no trait IDs or traits are provided, all traits will be returned. + + Args: + traits (list[TraitBase], optional): List of trait classes. + + Returns: + dict: Dictionary of traits. + + """ + result: dict[str, T] = {} + if not traits: + for trait_id in self._data: + result[trait_id] = self.get_trait_by_id(trait_id=trait_id) + return result + + for trait in traits: + result[str(trait.id)] = self.get_trait(trait=trait) + return result + + def get_traits_by_ids(self, trait_ids: list[str]) -> dict[str, T]: + """Get a list of traits from the representation by their id. + + If no trait IDs or traits are provided, all traits will be returned. + + Args: + trait_ids (list[str]): List of trait IDs. + + Returns: + dict: Dictionary of traits. + + """ + return { + trait_id: self.get_trait_by_id(trait_id) + for trait_id in trait_ids + } + + def traits_as_dict(self) -> dict: + """Return the traits from Representation data as a dictionary. + + Returns: + dict: Traits data dictionary. + + """ + return { + trait_id: trait.model_dump() + for trait_id, trait in self._data.items() + if trait and trait_id + } + + def __len__(self): + """Return the length of the data.""" + return len(self._data) + + def __init__( + self, + name: str, + representation_id: Optional[str] = None, + traits: Optional[list[T]] = None): + """Initialize the data. + + Args: + name (str): Representation name. Must be unique within instance. + representation_id (str, optional): Representation ID. + traits (list[TraitBase], optional): List of traits. + + """ + self.name = name + self.representation_id = representation_id or uuid.uuid4().hex + self._data = {} + if traits: + for trait in traits: + self.add_trait(trait) + + @staticmethod + def _get_version_from_id(trait_id: str) -> Union[int, None]: + # sourcery skip: use-named-expression + """Check if the trait has version specified. + + Args: + trait_id (str): Trait ID. + + Returns: + int: Trait version. + None: If the trait id does not have a version. 
+ + """ + version_regex = r"v(\d+)$" + match = re.search(version_regex, trait_id) + return int(match[1]) if match else None + + def __eq__(self, other: object) -> bool: # noqa: PLR0911 + """Check if the representation is equal to another. + + Args: + other (Representation): Representation to compare. + + Returns: + bool: True if the representations are equal, False otherwise. + + """ + if not isinstance(other, Representation): + return False + + if self.representation_id != other.representation_id: + return False + + if self.name != other.name: + return False + + # number of traits + if len(self) != len(other): + return False + + for trait_id, trait in self._data.items(): + if trait_id not in other._data: + return False + if trait != other._data[trait_id]: + return False + + return True + + @classmethod + @lru_cache(maxsize=64) + def _get_possible_trait_classes_from_modules( + cls, + trait_id: str) -> set[type[T]]: + """Get possible trait classes from modules. + + Args: + trait_id (str): Trait ID. + + Returns: + set[type[T]]: Set of trait classes. + + """ + modules = sys.modules.copy() + filtered_modules = modules.copy() + for module_name in modules: + for bl_module in cls._module_blacklist: + if module_name.startswith(bl_module): + filtered_modules.pop(module_name) + + trait_candidates = set() + for module in filtered_modules.values(): + if not module: + continue + + for attr_name in dir(module): + klass = getattr(module, attr_name) + if not inspect.isclass(klass): + continue + # this needs to be done because of the bug? in + # python ABCMeta, where ``issubclass`` is not working + # if it hits the GenericAlias (that is in fact + # tuple[int, int]). This is added to the scope by + # ``types`` module. + if type(klass) is GenericAlias: + continue + if issubclass(klass, TraitBase) \ + and str(klass.id).startswith(trait_id): + trait_candidates.add(klass) + # I + return trait_candidates # type: ignore[return-value] + + @classmethod + @lru_cache(maxsize=64) + def _get_trait_class( + cls, trait_id: str) -> Union[Type[T], None]: + """Get the trait class with corresponding to given ID. + + This method will search for the trait class in all the modules except + the blacklisted modules. There is some issue in Pydantic where + ``issubclass`` is not working properly so we are excluding explicitly + modules with offending classes. This list can be updated as needed to + speed up the search. + + Args: + trait_id (str): Trait ID. + + Returns: + Type[TraitBase]: Trait class. + + """ + version = cls._get_version_from_id(trait_id) + + trait_candidates = cls._get_possible_trait_classes_from_modules( + trait_id + ) + if not trait_candidates: + return None + + for trait_class in trait_candidates: + if trait_class.id == trait_id: + # we found direct match + return trait_class + + # if we didn't find direct match, we will search for the highest + # version of the trait. 
+ if not version: + # sourcery skip: use-named-expression + trait_versions = [ + trait_class for trait_class in trait_candidates + if re.match( + rf"{trait_id}.v(\d+)$", str(trait_class.id)) + ] + if trait_versions: + def _get_version_by_id(trait_klass: Type[T]) -> int: + match = re.search(r"v(\d+)$", str(trait_klass.id)) + return int(match[1]) if match else 0 + + error: LooseMatchingTraitError = LooseMatchingTraitError( + "Found trait that might match.") + error.found_trait = max( + trait_versions, key=_get_version_by_id) + error.expected_id = trait_id + raise error + + return None + + @classmethod + def get_trait_class_by_trait_id(cls, trait_id: str) -> Type[T]: + """Get the trait class for the given trait ID. + + Args: + trait_id (str): Trait ID. + + Returns: + type[TraitBase]: Trait class. + + Raises: + IncompatibleTraitVersionError: If the trait version is incompatible + with the current version of the trait. + + """ + try: + trait_class = cls._get_trait_class(trait_id=trait_id) + except LooseMatchingTraitError as e: + requested_version = _get_version_from_id(trait_id) + found_version = _get_version_from_id(e.found_trait.id) + if found_version is None and not requested_version: + msg = ( + "Trait found with no version and requested version " + "is not specified." + ) + raise IncompatibleTraitVersionError(msg) from e + + if requested_version is None: + trait_class = e.found_trait + requested_version = found_version + + if found_version is None: + msg = ( + f"Trait {e.found_trait.id} found with no version, " + "but requested version is specified." + ) + raise IncompatibleTraitVersionError(msg) from e + + if requested_version > found_version: + error_msg = ( + f"Requested trait version {requested_version} is " + f"higher than the found trait version {found_version}." + ) + raise IncompatibleTraitVersionError(error_msg) from e + + if requested_version < found_version and hasattr( + e.found_trait, "upgrade"): + error_msg = ( + "Requested trait version " + f"{requested_version} is lower " + f"than the found trait version {found_version}." + ) + error: UpgradableTraitError = UpgradableTraitError(error_msg) + error.trait = e.found_trait + raise error from e + return trait_class # type: ignore[return-value] + + @classmethod + def from_dict( + cls: Type[Representation], + name: str, + representation_id: Optional[str] = None, + trait_data: Optional[dict] = None) -> Representation: + """Create a representation from a dictionary. + + Args: + name (str): Representation name. + representation_id (str, optional): Representation ID. + trait_data (dict): Representation data. Dictionary with keys + as trait ids and values as trait data. Example:: + + { + "ayon.2d.PixelBased.v1": { + "display_window_width": 1920, + "display_window_height": 1080 + }, + "ayon.2d.Planar.v1": { + "channels": 3 + } + } + + Returns: + Representation: Representation instance. + + Raises: + ValueError: If the trait model with ID is not found. + TypeError: If the trait data is not a dictionary. + IncompatibleTraitVersionError: If the trait version is incompatible + + """ + if not trait_data: + trait_data = {} + traits = [] + for trait_id, value in trait_data.items(): + if not isinstance(value, dict): + msg = ( + f"Invalid trait data for trait ID {trait_id}. " + "Trait data must be a dictionary." 
+ ) + raise TypeError(msg) + + try: + trait_class = cls.get_trait_class_by_trait_id(trait_id) + except UpgradableTraitError as e: + # we found newer version of trait, we will upgrade the data + if hasattr(e.trait, "upgrade"): + traits.append(e.trait.upgrade(value)) + else: + msg = ( + f"Newer version of trait {e.trait.id} found " + f"for requested {trait_id} but without " + "upgrade method." + ) + raise IncompatibleTraitVersionError(msg) from e + else: + if not trait_class: + error_msg = f"Trait model with ID {trait_id} not found." + raise ValueError(error_msg) + + traits.append(trait_class(**value)) + + return cls( + name=name, representation_id=representation_id, traits=traits) + + def validate(self) -> None: + """Validate the representation. + + This method will validate all the traits in the representation. + + Raises: + TraitValidationError: If the trait is invalid within representation + + """ + errors = [] + for trait in self._data.values(): + # we do this in the loop to catch all the errors + try: + trait.validate_trait(self) + except TraitValidationError as e: # noqa: PERF203 + errors.append(str(e)) + if errors: + msg = "\n".join(errors) + scope = self.name + raise TraitValidationError(scope, msg) diff --git a/client/ayon_core/pipeline/traits/temporal.py b/client/ayon_core/pipeline/traits/temporal.py new file mode 100644 index 0000000000..dd11daa975 --- /dev/null +++ b/client/ayon_core/pipeline/traits/temporal.py @@ -0,0 +1,457 @@ +"""Temporal (time related) traits.""" +from __future__ import annotations + +import contextlib +import re +from dataclasses import dataclass +from enum import Enum, auto +from re import Pattern +from typing import TYPE_CHECKING, ClassVar, Optional + +import clique + +from .trait import MissingTraitError, TraitBase, TraitValidationError + +if TYPE_CHECKING: + + from .content import FileLocations + from .representation import Representation + + +class GapPolicy(Enum): + """Gap policy enumeration. + + This type defines how to handle gaps in sequence. + + Attributes: + forbidden (int): Gaps are forbidden. + missing (int): Gaps are interpreted as missing frames. + hold (int): Gaps are interpreted as hold frames (last existing frames). + black (int): Gaps are interpreted as black frames. + """ + + forbidden = auto() + missing = auto() + hold = auto() + black = auto() + + +@dataclass +class FrameRanged(TraitBase): + """Frame ranged trait model. + + Model representing a frame ranged trait. + + Sync with OpenAssetIO MediaCreation Traits. For compatibility with + OpenAssetIO, we'll need to handle different names of attributes: + + * frame_start -> start_frame + * frame_end -> end_frame + ... + + Note: frames_per_second is a string to allow various precision + formats. FPS is a floating point number, but it can be also + represented as a fraction (e.g. "30000/1001") or as a decimal + or even as irrational number. We need to support all these + formats. To work with FPS, we'll need some helper function + to convert FPS to Decimal from string. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + frame_start (int): Frame start. + frame_end (int): Frame end. + frame_in (int): Frame in. + frame_out (int): Frame out. + frames_per_second (str): Frames per second. + step (int): Step. 
+ """ + + name: ClassVar[str] = "FrameRanged" + description: ClassVar[str] = "Frame Ranged Trait" + id: ClassVar[str] = "ayon.time.FrameRanged.v1" + persistent: ClassVar[bool] = True + frame_start: int + frame_end: int + frame_in: Optional[int] = None + frame_out: Optional[int] = None + frames_per_second: str = None + step: Optional[int] = None + + +@dataclass +class Handles(TraitBase): + """Handles trait model. + + Handles define the range of frames that are included or excluded + from the sequence. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + inclusive (bool): Handles are inclusive. + frame_start_handle (int): Frame start handle. + frame_end_handle (int): Frame end handle. + """ + + name: ClassVar[str] = "Handles" + description: ClassVar[str] = "Handles Trait" + id: ClassVar[str] = "ayon.time.Handles.v1" + persistent: ClassVar[bool] = True + inclusive: Optional[bool] = False + frame_start_handle: Optional[int] = None + frame_end_handle: Optional[int] = None + + +@dataclass +class Sequence(TraitBase): + """Sequence trait model. + + This model represents a sequence trait. Based on the FrameRanged trait + and Handles, adding support for gaps policy, frame padding and frame + list specification. Regex is used to match frame numbers. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + gaps_policy (GapPolicy): Gaps policy - how to handle gaps in + sequence. + frame_padding (int): Frame padding. + frame_regex (str): Frame regex - regular expression to match + frame numbers. Must include 'index' named group and 'padding' + named group. + frame_spec (str): Frame list specification of frames. This takes + string like "1-10,20-30,40-50" etc. + """ + + name: ClassVar[str] = "Sequence" + description: ClassVar[str] = "Sequence Trait Model" + id: ClassVar[str] = "ayon.time.Sequence.v1" + persistent: ClassVar[bool] = True + frame_padding: int + gaps_policy: Optional[GapPolicy] = GapPolicy.forbidden + frame_regex: Optional[Pattern] = None + frame_spec: Optional[str] = None + + @classmethod + def validate_frame_regex( + cls, v: Optional[Pattern] + ) -> Optional[Pattern]: + """Validate frame regex. + + Frame regex must have index and padding named groups. + + Returns: + Optional[Pattern]: Compiled regex pattern. + + Raises: + ValueError: If frame regex does not include 'index' and 'padding' + + """ + if v is None: + return v + if v and any(s not in v.pattern for s in ["?P", "?P"]): + msg = "Frame regex must include 'index' and `padding named groups" + raise ValueError(msg) + return v + + def validate_trait(self, representation: Representation) -> None: + """Validate the trait.""" + super().validate_trait(representation) + + # if there is FileLocations trait, run validation + # on it as well + + with contextlib.suppress(MissingTraitError): + self._validate_file_locations(representation) + + def _validate_file_locations(self, representation: Representation) -> None: + """Validate file locations trait. + + If along with the Sequence trait, there is a FileLocations trait, + then we need to validate if the file locations match the frame + list specification. + + Args: + representation (Representation): Representation instance. 
+ + """ + from .content import FileLocations + file_locs: FileLocations = representation.get_trait( + FileLocations) + # validate if file locations on representation + # matches the frame list (if any) + # we need to extend the expected frames with Handles + frame_start = None + frame_end = None + handles_frame_start = None + handles_frame_end = None + with contextlib.suppress(MissingTraitError): + handles: Handles = representation.get_trait(Handles) + # if handles are inclusive, they should be already + # accounted in the FrameRaged frame spec + if not handles.inclusive: + handles_frame_start = handles.frame_start_handle + handles_frame_end = handles.frame_end_handle + with contextlib.suppress(MissingTraitError): + frame_ranged: FrameRanged = representation.get_trait( + FrameRanged) + frame_start = frame_ranged.frame_start + frame_end = frame_ranged.frame_end + if self.frame_spec is not None: + self.validate_frame_list( + file_locs, + frame_start, + frame_end, + handles_frame_start, + handles_frame_end) + + self.validate_frame_padding(file_locs) + + def validate_frame_list( + self, + file_locations: FileLocations, + frame_start: Optional[int] = None, + frame_end: Optional[int] = None, + handles_frame_start: Optional[int] = None, + handles_frame_end: Optional[int] = None) -> None: + """Validate frame list. + + This will take FileLocations trait and validate if the + file locations match the frame list specification. + + For example, if frame list is "1-10,20-30,40-50", then + the frame numbers in the file locations should match + these frames. + + It will skip the validation if frame list is not provided. + + Args: + file_locations (FileLocations): File locations trait. + frame_start (Optional[int]): Frame start. + frame_end (Optional[int]): Frame end. + handles_frame_start (Optional[int]): Frame start handle. + handles_frame_end (Optional[int]): Frame end handle. + + Raises: + TraitValidationError: If frame list does not match + the expected frames. + + """ + if self.frame_spec is None: + return + + frames: list[int] = [] + if self.frame_regex: + frames = self.get_frame_list( + file_locations, self.frame_regex) + else: + frames = self.get_frame_list( + file_locations) + + expected_frames = self.list_spec_to_frames(self.frame_spec) + if frame_start is None or frame_end is None: + if min(expected_frames) != frame_start: + msg = ( + "Frame start does not match the expected frame start. " + f"Expected: {frame_start}, Found: {min(expected_frames)}" + ) + raise TraitValidationError(self.name, msg) + + if max(expected_frames) != frame_end: + msg = ( + "Frame end does not match the expected frame end. " + f"Expected: {frame_end}, Found: {max(expected_frames)}" + ) + raise TraitValidationError(self.name, msg) + + # we need to extend the expected frames with Handles + if handles_frame_start is not None: + expected_frames.extend( + range( + min(frames) - handles_frame_start, min(frames) + 1)) + + if handles_frame_end is not None: + expected_frames.extend( + range( + max(frames), max(frames) + handles_frame_end + 1)) + + if set(frames) != set(expected_frames): + msg = ( + "Frame list does not match the expected frames. " + f"Expected: {expected_frames}, Found: {frames}" + ) + raise TraitValidationError(self.name, msg) + + def validate_frame_padding( + self, file_locations: FileLocations) -> None: + """Validate frame padding. + + This will take FileLocations trait and validate if the + frame padding matches the expected frame padding. + + Args: + file_locations (FileLocations): File locations trait. 
+
+    def validate_frame_padding(
+            self, file_locations: FileLocations) -> None:
+        """Validate the frame padding.
+
+        This takes the FileLocations trait and validates whether the
+        frame padding matches the expected frame padding.
+
+        Args:
+            file_locations (FileLocations): File locations trait.
+
+        Raises:
+            TraitValidationError: If the frame padding does not match
+                the expected frame padding.
+
+        """
+        expected_padding = self.get_frame_padding(file_locations)
+        if self.frame_padding != expected_padding:
+            msg = (
+                "Frame padding does not match the expected frame padding. "
+                f"Expected: {expected_padding}, Found: {self.frame_padding}"
+            )
+            raise TraitValidationError(self.name, msg)
+
+    @staticmethod
+    def list_spec_to_frames(list_spec: str) -> list[int]:
+        """Convert a list specification to frames.
+
+        Returns:
+            list[int]: List of frame numbers.
+
+        Raises:
+            ValueError: If there is an invalid frame number in the list.
+
+        """
+        frames = []
+        segments = list_spec.split(",")
+        for segment in segments:
+            ranges = segment.split("-")
+            if len(ranges) == 1:
+                if not ranges[0].isdigit():
+                    msg = (
+                        "Invalid frame number "
+                        f"in the list: {ranges[0]}"
+                    )
+                    raise ValueError(msg)
+                frames.append(int(ranges[0]))
+                continue
+            start, end = segment.split("-")
+            frames.extend(range(int(start), int(end) + 1))
+        return frames
+
+    @staticmethod
+    def _get_collection(
+            file_locations: FileLocations,
+            regex: Optional[Pattern] = None) -> clique.Collection:
+        r"""Get a collection from file locations.
+
+        Args:
+            file_locations (FileLocations): File locations trait.
+            regex (Optional[Pattern]): Regular expression to match
+                frame numbers. This is passed to ``clique.assemble()``.
+                The default clique pattern is::
+
+                    \.(?P<index>(?P<padding>0*)\d+)\.\D+\d?$
+
+        Returns:
+            clique.Collection: Collection instance.
+
+        Raises:
+            ValueError: If zero or multiple collections are found.
+
+        """
+        patterns = [regex] if regex else None
+        files: list[str] = [
+            file.file_path.as_posix()
+            for file in file_locations.file_paths
+        ]
+        src_collections, _ = clique.assemble(files, patterns=patterns)
+        if len(src_collections) != 1:
+            msg = (
+                f"Zero or multiple collections found: {len(src_collections)} "
+                "expected 1"
+            )
+            raise ValueError(msg)
+        return src_collections[0]
+
+    @staticmethod
+    def get_frame_padding(file_locations: FileLocations) -> int:
+        """Get the frame padding.
+
+        Returns:
+            int: Frame padding.
+
+        """
+        src_collection = Sequence._get_collection(file_locations)
+        padding = src_collection.padding
+        # sometimes clique doesn't get the padding right, so
+        # we need to calculate it manually
+        if padding == 0:
+            padding = len(str(max(src_collection.indexes)))
+
+        return padding
+
+    @staticmethod
+    def get_frame_list(
+            file_locations: FileLocations,
+            regex: Optional[Pattern] = None,
+    ) -> list[int]:
+        r"""Get the frame list.
+
+        Args:
+            file_locations (FileLocations): File locations trait.
+            regex (Optional[Pattern]): Regular expression to match
+                frame numbers. This is passed to ``clique.assemble()``.
+                The default clique pattern is::
+
+                    \.(?P<index>(?P<padding>0*)\d+)\.\D+\d?$
+
+        Returns:
+            list[int]: List of frame numbers.
+
+        """
+        src_collection = Sequence._get_collection(file_locations, regex)
+        return list(src_collection.indexes)
+
+    def get_frame_pattern(self) -> Pattern:
+        """Get the frame regex as a compiled pattern.
+
+        If the regex is a string, it is compiled to a pattern.
+
+        Returns:
+            Pattern: Compiled regex pattern.
+
+        """
+        if self.frame_regex:
+            if isinstance(self.frame_regex, str):
+                return re.compile(self.frame_regex)
+            return self.frame_regex
+        return re.compile(
+            r"\.(?P<index>(?P<padding>0*)\d+)\.\D+\d?$")
+
+
+# Do we need one for drop and non-drop frame?
+@dataclass
+class SMPTETimecode(TraitBase):
+    """SMPTE Timecode trait model.
+ + Attributes: + timecode (str): SMPTE Timecode HH:MM:SS:FF + """ + + name: ClassVar[str] = "Timecode" + description: ClassVar[str] = "SMPTE Timecode Trait" + id: ClassVar[str] = "ayon.time.SMPTETimecode.v1" + persistent: ClassVar[bool] = True + timecode: str + + +@dataclass +class Static(TraitBase): + """Static time trait. + + Used to define static time (single frame). + """ + + name: ClassVar[str] = "Static" + description: ClassVar[str] = "Static Time Trait" + id: ClassVar[str] = "ayon.time.Static.v1" + persistent: ClassVar[bool] = True diff --git a/client/ayon_core/pipeline/traits/three_dimensional.py b/client/ayon_core/pipeline/traits/three_dimensional.py new file mode 100644 index 0000000000..d68fb99e61 --- /dev/null +++ b/client/ayon_core/pipeline/traits/three_dimensional.py @@ -0,0 +1,93 @@ +"""3D traits.""" +from dataclasses import dataclass +from typing import ClassVar + +from .trait import TraitBase + + +@dataclass +class Spatial(TraitBase): + """Spatial trait model. + + Trait describing spatial information. Up axis valid strings are + "Y", "Z", "X". Handedness valid strings are "left", "right". Meters per + unit is a float value. + + Example:: + + Spatial(up_axis="Y", handedness="right", meters_per_unit=1.0) + + Todo: + * Add value validation for up_axis and handedness. + + Attributes: + up_axis (str): Up axis. + handedness (str): Handedness. + meters_per_unit (float): Meters per unit. + """ + + id: ClassVar[str] = "ayon.3d.Spatial.v1" + name: ClassVar[str] = "Spatial" + description: ClassVar[str] = "Spatial trait model." + persistent: ClassVar[bool] = True + up_axis: str + handedness: str + meters_per_unit: float + + +@dataclass +class Geometry(TraitBase): + """Geometry type trait model. + + Type trait for geometry data. + + Sync with OpenAssetIO MediaCreation Traits. + """ + + id: ClassVar[str] = "ayon.3d.Geometry.v1" + name: ClassVar[str] = "Geometry" + description: ClassVar[str] = "Geometry trait model." + persistent: ClassVar[bool] = True + + +@dataclass +class Shader(TraitBase): + """Shader trait model. + + Type trait for shader data. + + Sync with OpenAssetIO MediaCreation Traits. + """ + + id: ClassVar[str] = "ayon.3d.Shader.v1" + name: ClassVar[str] = "Shader" + description: ClassVar[str] = "Shader trait model." + persistent: ClassVar[bool] = True + + +@dataclass +class Lighting(TraitBase): + """Lighting trait model. + + Type trait for lighting data. + + Sync with OpenAssetIO MediaCreation Traits. + """ + + id: ClassVar[str] = "ayon.3d.Lighting.v1" + name: ClassVar[str] = "Lighting" + description: ClassVar[str] = "Lighting trait model." + persistent: ClassVar[bool] = True + + +@dataclass +class IESProfile(TraitBase): + """IES profile (IES-LM-64) type trait model. + + Sync with OpenAssetIO MediaCreation Traits. + """ + + id: ClassVar[str] = "ayon.3d.IESProfile.v1" + name: ClassVar[str] = "IESProfile" + description: ClassVar[str] = "IES profile trait model." 
+ persistent: ClassVar[bool] = True diff --git a/client/ayon_core/pipeline/traits/trait.py b/client/ayon_core/pipeline/traits/trait.py new file mode 100644 index 0000000000..01a7641c59 --- /dev/null +++ b/client/ayon_core/pipeline/traits/trait.py @@ -0,0 +1,138 @@ +"""Defines the base trait model and representation.""" +from __future__ import annotations + +import re +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import TYPE_CHECKING, Generic, Optional, TypeVar + +if TYPE_CHECKING: + from .representation import Representation + + +T = TypeVar("T", bound="TraitBase") + + +@dataclass +class TraitBase(ABC): + """Base trait model. + + This model must be used as a base for all trait models. + ``id``, ``name``, and ``description`` are abstract attributes that must be + implemented in the derived classes. + """ + + @property + @abstractmethod + def id(self) -> str: + """Abstract attribute for ID.""" + ... + + @property + @abstractmethod + def name(self) -> str: + """Abstract attribute for name.""" + ... + + @property + @abstractmethod + def description(self) -> str: + """Abstract attribute for description.""" + ... + + def validate_trait(self, representation: Representation) -> None: # noqa: PLR6301 + """Validate the trait. + + This method should be implemented in the derived classes to validate + the trait data. It can be used by traits to validate against other + traits in the representation. + + Args: + representation (Representation): Representation instance. + + """ + return + + @classmethod + def get_version(cls) -> Optional[int]: + # sourcery skip: use-named-expression + """Get trait version from ID. + + This assumes Trait ID ends with `.v{version}`. If not, it will + return None. + + Returns: + Optional[int]: Trait version + + """ + version_regex = r"v(\d+)$" + match = re.search(version_regex, str(cls.id)) + return int(match[1]) if match else None + + @classmethod + def get_versionless_id(cls) -> str: + """Get trait ID without version. + + Returns: + str: Trait ID without version. + + """ + return re.sub(r"\.v\d+$", "", str(cls.id)) + + +class IncompatibleTraitVersionError(Exception): + """Incompatible trait version exception. + + This exception is raised when the trait version is incompatible with the + current version of the trait. + """ + + +class UpgradableTraitError(Exception, Generic[T]): + """Upgradable trait version exception. + + This exception is raised when the trait can upgrade existing data + meant for older versions of the trait. It must implement `upgrade` + method that will take old trait data as argument to handle the upgrade. + """ + + trait: T + old_data: dict + + +class LooseMatchingTraitError(Exception, Generic[T]): + """Loose matching trait exception. + + This exception is raised when the trait is found with a loose matching + criteria. + """ + + found_trait: T + expected_id: str + + +class TraitValidationError(Exception): + """Trait validation error exception. + + This exception is raised when the trait validation fails. + """ + + def __init__(self, scope: str, message: str): + """Initialize the exception. + + We could determine the scope from the stack in the future, + provided the scope is always Trait name. + + Args: + scope (str): Scope of the error. + message (str): Error message. + + """ + super().__init__(f"{scope}: {message}") + + +class MissingTraitError(TypeError): + """Missing trait error exception. + + This exception is raised when the trait is missing. 
+ """ diff --git a/client/ayon_core/pipeline/traits/two_dimensional.py b/client/ayon_core/pipeline/traits/two_dimensional.py new file mode 100644 index 0000000000..93d7d8c86a --- /dev/null +++ b/client/ayon_core/pipeline/traits/two_dimensional.py @@ -0,0 +1,204 @@ +"""Two-dimensional image traits.""" +from __future__ import annotations + +import re +from dataclasses import dataclass +from typing import TYPE_CHECKING, ClassVar, Optional + +from .trait import TraitBase + +if TYPE_CHECKING: + from .content import FileLocation, FileLocations + + +@dataclass +class Image(TraitBase): + """Image trait model. + + Type trait model for image. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + """ + + name: ClassVar[str] = "Image" + description: ClassVar[str] = "Image Trait" + id: ClassVar[str] = "ayon.2d.Image.v1" + persistent: ClassVar[bool] = True + + +@dataclass +class PixelBased(TraitBase): + """PixelBased trait model. + + Pixel related trait for image data. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + display_window_width (int): Width of the image display window. + display_window_height (int): Height of the image display window. + pixel_aspect_ratio (float): Pixel aspect ratio. + """ + + name: ClassVar[str] = "PixelBased" + description: ClassVar[str] = "PixelBased Trait Model" + id: ClassVar[str] = "ayon.2d.PixelBased.v1" + persistent: ClassVar[bool] = True + display_window_width: int + display_window_height: int + pixel_aspect_ratio: float + + +@dataclass +class Planar(TraitBase): + """Planar trait model. + + This model represents an Image with planar configuration. + + Todo: + * (antirotor): Is this really a planar configuration? As with + bit planes and everything? If it serves as differentiator for + Deep images, should it be named differently? Like Raster? + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + planar_configuration (str): Planar configuration. + """ + + name: ClassVar[str] = "Planar" + description: ClassVar[str] = "Planar Trait Model" + id: ClassVar[str] = "ayon.2d.Planar.v1" + persistent: ClassVar[bool] = True + planar_configuration: str + + +@dataclass +class Deep(TraitBase): + """Deep trait model. + + Type trait model for deep EXR images. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + """ + + name: ClassVar[str] = "Deep" + description: ClassVar[str] = "Deep Trait Model" + id: ClassVar[str] = "ayon.2d.Deep.v1" + persistent: ClassVar[bool] = True + + +@dataclass +class Overscan(TraitBase): + """Overscan trait model. + + This model represents an overscan (or underscan) trait. Defines the + extra pixels around the image. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + left (int): Left overscan/underscan. + right (int): Right overscan/underscan. + top (int): Top overscan/underscan. + bottom (int): Bottom overscan/underscan. + """ + + name: ClassVar[str] = "Overscan" + description: ClassVar[str] = "Overscan Trait" + id: ClassVar[str] = "ayon.2d.Overscan.v1" + persistent: ClassVar[bool] = True + left: int + right: int + top: int + bottom: int + + +@dataclass +class UDIM(TraitBase): + """UDIM trait model. 
+
+    This model represents a UDIM trait.
+
+    Attributes:
+        name (str): Trait name.
+        description (str): Trait description.
+        id (str): id should be namespaced trait name with version
+        udim (list[int]): List of UDIM values.
+        udim_regex (str): UDIM regex.
+    """
+
+    name: ClassVar[str] = "UDIM"
+    description: ClassVar[str] = "UDIM Trait"
+    id: ClassVar[str] = "ayon.2d.UDIM.v1"
+    persistent: ClassVar[bool] = True
+    udim: list[int]
+    udim_regex: Optional[str] = r"(?:\.|_)(?P<udim>\d+)\.\D+\d?$"
+
+    # field validator for udim_regex - this works in pydantic model v2 but
+    # not with the pure data classes
+    @classmethod
+    def validate_frame_regex(cls, v: Optional[str]) -> Optional[str]:
+        """Validate the UDIM regex.
+
+        Returns:
+            Optional[str]: UDIM regex.
+
+        Raises:
+            ValueError: If the UDIM regex does not include a 'udim'
+                named group.
+
+        """
+        if v is not None and "?P<udim>" not in v:
+            msg = "UDIM regex must include 'udim' named group"
+            raise ValueError(msg)
+        return v
+
+    def get_file_location_for_udim(
+            self,
+            file_locations: FileLocations,
+            udim: int,
+    ) -> Optional[FileLocation]:
+        """Get the file location for a UDIM value.
+
+        Args:
+            file_locations (FileLocations): File locations.
+            udim (int): UDIM value.
+
+        Returns:
+            Optional[FileLocation]: File location.
+
+        """
+        pattern = re.compile(self.udim_regex)
+        for location in file_locations.file_paths:
+            result = re.search(pattern, location.file_path.name)
+            if result:
+                udim_index = int(result.group("udim"))
+                if udim_index == udim:
+                    return location
+        return None
+
+    def get_udim_from_file_location(
+            self, file_location: FileLocation) -> Optional[int]:
+        """Get the UDIM value from a file location.
+
+        Args:
+            file_location (FileLocation): File location.
+
+        Returns:
+            Optional[int]: UDIM value.
+
+        """
+        pattern = re.compile(self.udim_regex)
+        result = re.search(pattern, file_location.file_path.name)
+        if result:
+            return int(result.group("udim"))
+        return None
diff --git a/client/ayon_core/pipeline/traits/utils.py b/client/ayon_core/pipeline/traits/utils.py
new file mode 100644
index 0000000000..ef22122124
--- /dev/null
+++ b/client/ayon_core/pipeline/traits/utils.py
@@ -0,0 +1,51 @@
+"""Utility functions for traits."""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from clique import assemble
+
+from ayon_core.pipeline.traits.temporal import FrameRanged
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+def get_sequence_from_files(paths: list[Path]) -> FrameRanged:
+    """Get the original frame range from a list of files.
+
+    Note that this cannot guess the frame rate, so it is set to 25.
+    It will also fail on paths that cannot be assembled into
+    one collection without any remainders.
+
+    Args:
+        paths (list[Path]): List of file paths.
+
+    Returns:
+        FrameRanged: FrameRanged trait.
+ + Raises: + ValueError: If paths cannot be assembled into one collection + + """ + cols, rems = assemble([path.as_posix() for path in paths]) + if rems: + msg = "Cannot assemble paths into one collection" + raise ValueError(msg) + if len(cols) != 1: + msg = "More than one collection found" + raise ValueError(msg) + col = cols[0] + + sorted_frames = sorted(col.indexes) + # First frame used for end value + first_frame = sorted_frames[0] + # Get last frame for padding + last_frame = sorted_frames[-1] + # Use padding from collection of length of last frame as string + # padding = max(col.padding, len(str(last_frame))) + + return FrameRanged( + frame_start=first_frame, frame_end=last_frame, + frames_per_second="25.0" + ) diff --git a/poetry.lock b/poetry.lock index 2d040a5f91..7286ee59a7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. [[package]] name = "appdirs" @@ -6,7 +6,6 @@ version = "1.4.4" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, @@ -18,30 +17,28 @@ version = "25.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, {file = "attrs-25.1.0.tar.gz", hash = "sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", 
"sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] +tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "ayon-python-api" -version = "1.0.12" +version = "1.0.11" description = "AYON Python API" optional = false python-versions = "*" -groups = ["dev"] files = [ - {file = "ayon-python-api-1.0.12.tar.gz", hash = "sha256:8e4c03436df8afdda4c6ad4efce436068771995bb0153a90e003364afa0e7f55"}, - {file = "ayon_python_api-1.0.12-py3-none-any.whl", hash = "sha256:65f61c2595dd6deb26fed5e3fda7baef887f475fa4b21df12513646ddccf4a7d"}, + {file = "ayon-python-api-1.0.11.tar.gz", hash = "sha256:3497237c379979268c321304c7d19cb2844d699f6ac34c21293f5dac29c13531"}, + {file = "ayon_python_api-1.0.11-py3-none-any.whl", hash = "sha256:f6134c29c8c8a36cdb2882f9404dd43817bd6636d663b3cd2344de9f1fc1e4b2"}, ] [package.dependencies] @@ -51,14 +48,13 @@ Unidecode = ">=1.3.0" [[package]] name = "certifi" -version = "2025.1.31" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -67,7 +63,6 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -75,104 +70,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.1" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = 
"sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +python-versions = ">=3.7.0" +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = 
"sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -181,7 +188,6 @@ version = "2.0.0" description = "Manage collections with common numerical component" optional = false python-versions = ">=3.0, <4.0" -groups = ["dev"] files = [ {file = "clique-2.0.0-py2.py3-none-any.whl", hash = "sha256:45e2a4c6078382e0b217e5e369494279cf03846d95ee601f93290bed5214c22e"}, {file = "clique-2.0.0.tar.gz", hash = "sha256:6e1115dbf21b1726f4b3db9e9567a662d6bdf72487c4a0a1f8cb7f10cf4f4754"}, @@ -194,20 +200,19 @@ test = ["pytest (>=2.3.5,<5)", "pytest-cov (>=2,<3)", "pytest-runner (>=2.7,<3)" [[package]] name = "codespell" -version = "2.4.1" -description = "Fix common misspellings in text files" +version = "2.3.0" +description = "Codespell" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "codespell-2.4.1-py3-none-any.whl", hash = "sha256:3dadafa67df7e4a3dbf51e0d7315061b80d265f9552ebd699b3dd6834b47e425"}, - {file = "codespell-2.4.1.tar.gz", hash = "sha256:299fcdcb09d23e81e35a671bbe746d5ad7e8385972e65dbb833a2eaac33c01e5"}, + {file = "codespell-2.3.0-py3-none-any.whl", hash = "sha256:a9c7cef2501c9cfede2110fd6d4e5e62296920efe9abfb84648df866e47f58d1"}, + {file = "codespell-2.3.0.tar.gz", hash = "sha256:360c7d10f75e65f67bad720af7007e1060a5d395670ec11a7ed1fed9dd17471f"}, ] [package.extras] dev = ["Pygments", "build", "chardet", "pre-commit", "pytest", "pytest-cov", "pytest-dependency", "ruff", "tomli", "twine"] hard-encoding-detection = ["chardet"] -toml = ["tomli ; python_version < \"3.11\""] +toml = ["tomli"] types = ["chardet (>=5.1.0)", "mypy", "pytest", "pytest-cov", "pytest-dependency"] [[package]] @@ -216,8 +221,6 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -229,7 +232,6 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, @@ -241,7 +243,6 @@ version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -252,31 +253,29 @@ test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.17.0" +version = "3.16.1" description = "A platform independent file lock." optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, - {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "identify" -version = "2.6.7" +version = "2.6.3" description = "File identification library for Python" optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "identify-2.6.7-py2.py3-none-any.whl", hash = "sha256:155931cb617a401807b09ecec6635d6c692d180090a1cedca8ef7d58ba5b6aa0"}, - {file = "identify-2.6.7.tar.gz", hash = "sha256:3fa266b42eba321ee0b2bb0936a6a6b9e36a1351cbb69055b3082f4193035684"}, + {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, + {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, ] [package.extras] @@ -288,7 +287,6 @@ version = "3.10" description = "Internationalized Domain Names in Applications 
(IDNA)" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -303,7 +301,6 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -311,14 +308,13 @@ files = [ [[package]] name = "mock" -version = "5.1.0" +version = "5.2.0" description = "Rolling backport of unittest.mock for all Pythons" optional = false python-versions = ">=3.6" -groups = ["dev"] files = [ - {file = "mock-5.1.0-py3-none-any.whl", hash = "sha256:18c694e5ae8a208cdb3d2c20a993ca1a7b0efa258c247a1e565150f477f83744"}, - {file = "mock-5.1.0.tar.gz", hash = "sha256:5e96aad5ccda4718e0a229ed94b2024df75cc2d55575ba5762d31f5767b8767d"}, + {file = "mock-5.2.0-py3-none-any.whl", hash = "sha256:7ba87f72ca0e915175596069dbbcc7c75af7b5e9b9bc107ad6349ede0819982f"}, + {file = "mock-5.2.0.tar.gz", hash = "sha256:4e460e818629b4b173f32d08bf30d3af8123afbb8e04bb5707a1fd4799e503f0"}, ] [package.extras] @@ -326,13 +322,76 @@ build = ["blurb", "twine", "wheel"] docs = ["sphinx"] test = ["pytest", "pytest-cov"] +[[package]] +name = "mypy" +version = "1.14.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e971c1c667007f9f2b397ffa80fa8e1e0adccff336e5e77e74cb5f22868bee87"}, + {file = "mypy-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e86aaeaa3221a278c66d3d673b297232947d873773d61ca3ee0e28b2ff027179"}, + {file = "mypy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1628c5c3ce823d296e41e2984ff88c5861499041cb416a8809615d0c1f41740e"}, + {file = "mypy-1.14.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7fadb29b77fc14a0dd81304ed73c828c3e5cde0016c7e668a86a3e0dfc9f3af3"}, + {file = "mypy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:3fa76988dc760da377c1e5069200a50d9eaaccf34f4ea18428a3337034ab5a44"}, + {file = "mypy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e73c8a154eed31db3445fe28f63ad2d97b674b911c00191416cf7f6459fd49a"}, + {file = "mypy-1.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:273e70fcb2e38c5405a188425aa60b984ffdcef65d6c746ea5813024b68c73dc"}, + {file = "mypy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1daca283d732943731a6a9f20fdbcaa927f160bc51602b1d4ef880a6fb252015"}, + {file = "mypy-1.14.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7e68047bedb04c1c25bba9901ea46ff60d5eaac2d71b1f2161f33107e2b368eb"}, + {file = "mypy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:7a52f26b9c9b1664a60d87675f3bae00b5c7f2806e0c2800545a32c325920bcc"}, + {file = "mypy-1.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d5326ab70a6db8e856d59ad4cb72741124950cbbf32e7b70e30166ba7bbf61dd"}, + {file = "mypy-1.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf4ec4980bec1e0e24e5075f449d014011527ae0055884c7e3abc6a99cd2c7f1"}, + {file = 
"mypy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:390dfb898239c25289495500f12fa73aa7f24a4c6d90ccdc165762462b998d63"}, + {file = "mypy-1.14.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7e026d55ddcd76e29e87865c08cbe2d0104e2b3153a523c529de584759379d3d"}, + {file = "mypy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:585ed36031d0b3ee362e5107ef449a8b5dfd4e9c90ccbe36414ee405ee6b32ba"}, + {file = "mypy-1.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9f6f4c0b27401d14c483c622bc5105eff3911634d576bbdf6695b9a7c1ba741"}, + {file = "mypy-1.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56b2280cedcb312c7a79f5001ae5325582d0d339bce684e4a529069d0e7ca1e7"}, + {file = "mypy-1.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:342de51c48bab326bfc77ce056ba08c076d82ce4f5a86621f972ed39970f94d8"}, + {file = "mypy-1.14.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00df23b42e533e02a6f0055e54de9a6ed491cd8b7ea738647364fd3a39ea7efc"}, + {file = "mypy-1.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:e8c8387e5d9dff80e7daf961df357c80e694e942d9755f3ad77d69b0957b8e3f"}, + {file = "mypy-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b16738b1d80ec4334654e89e798eb705ac0c36c8a5c4798496cd3623aa02286"}, + {file = "mypy-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10065fcebb7c66df04b05fc799a854b1ae24d9963c8bb27e9064a9bdb43aa8ad"}, + {file = "mypy-1.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fbb7d683fa6bdecaa106e8368aa973ecc0ddb79a9eaeb4b821591ecd07e9e03c"}, + {file = "mypy-1.14.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3498cb55448dc5533e438cd13d6ddd28654559c8c4d1fd4b5ca57a31b81bac01"}, + {file = "mypy-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:c7b243408ea43755f3a21a0a08e5c5ae30eddb4c58a80f415ca6b118816e60aa"}, + {file = "mypy-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14117b9da3305b39860d0aa34b8f1ff74d209a368829a584eb77524389a9c13e"}, + {file = "mypy-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af98c5a958f9c37404bd4eef2f920b94874507e146ed6ee559f185b8809c44cc"}, + {file = "mypy-1.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0b343a1d3989547024377c2ba0dca9c74a2428ad6ed24283c213af8dbb0710b"}, + {file = "mypy-1.14.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cdb5563c1726c85fb201be383168f8c866032db95e1095600806625b3a648cb7"}, + {file = "mypy-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:74e925649c1ee0a79aa7448baf2668d81cc287dc5782cff6a04ee93f40fb8d3f"}, + {file = "mypy-1.14.0-py3-none-any.whl", hash = "sha256:2238d7f93fc4027ed1efc944507683df3ba406445a2b6c96e79666a045aadfab"}, + {file = "mypy-1.14.0.tar.gz", hash = "sha256:822dbd184d4a9804df5a7d5335a68cf7662930e70b8c1bc976645d1509f9a9d6"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "nodeenv" version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -344,7 +403,6 @@ version = "0.17.0" description = "Editorial interchange format and API" optional = false python-versions = "!=3.9.0,>=3.7" -groups = ["dev"] files = [ {file = "OpenTimelineIO-0.17.0-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:2dd31a570cabfd6227c1b1dd0cc038da10787492c26c55de058326e21fe8a313"}, {file = "OpenTimelineIO-0.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5a1da5d4803d1ba5e846b181a9e0f4a392c76b9acc5e08947772bc086f2ebfc0"}, @@ -386,7 +444,7 @@ files = [ [package.extras] dev = ["check-manifest", "coverage (>=4.5)", "flake8 (>=3.5)", "urllib3 (>=1.24.3)"] -view = ["PySide2 (>=5.11,<6.0) ; platform_machine == \"x86_64\"", "PySide6 (>=6.2,<7.0) ; platform_machine == \"aarch64\""] +view = ["PySide2 (>=5.11,<6.0)", "PySide6 (>=6.2,<7.0)"] [[package]] name = "packaging" @@ -394,7 +452,6 @@ version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, @@ -406,7 +463,6 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -423,7 +479,6 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -435,14 +490,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.8.0" +version = "4.0.1" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.9" -groups = ["dev"] files = [ - {file = "pre_commit-3.8.0-py2.py3-none-any.whl", hash = "sha256:9a90a53bf82fdd8778d58085faf8d83df56e40dfe18f45b19446e26bf1b3a63f"}, - {file = "pre_commit-3.8.0.tar.gz", hash = "sha256:8bb6494d4a20423842e198980c9ecf9f96607a07ea29549e180eef9ae80fe7af"}, + {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, + {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, ] [package.dependencies] @@ -458,7 +512,6 @@ version = "1.8.12" description = "Plug-in driven automation framework for content" optional = false python-versions = "*" -groups = ["dev"] files = [ {file = "pyblish-base-1.8.12.tar.gz", hash = "sha256:ebc184eb038864380555227a8b58055dd24ece7e6ef7f16d33416c718512871b"}, {file = "pyblish_base-1.8.12-py2.py3-none-any.whl", hash = "sha256:2cbe956bfbd4175a2d7d22b344cd345800f4d4437153434ab658fc12646a11e8"}, @@ -470,7 +523,6 @@ version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, @@ -493,7 +545,6 @@ version = "1.0.2" description = "pytest-print adds the printer fixture you can use to print messages to the user (directly to the pytest runner, not stdout)" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "pytest_print-1.0.2-py3-none-any.whl", hash = "sha256:3ae7891085dddc3cd697bd6956787240107fe76d6b5cdcfcd782e33ca6543de9"}, {file = "pytest_print-1.0.2.tar.gz", hash = "sha256:2780350a7bbe7117f99c5d708dc7b0431beceda021b1fd3f11200670d7f33679"}, @@ -511,7 +562,6 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -574,7 +624,6 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -592,41 +641,40 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.3.7" +version = "0.9.9" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0e8377cccb2f07abd25e84fc5b2cbe48eeb0fea9f1719cad7caedb061d70e5ce"}, - {file = "ruff-0.3.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:15a4d1cc1e64e556fa0d67bfd388fed416b7f3b26d5d1c3e7d192c897e39ba4b"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d28bdf3d7dc71dd46929fafeec98ba89b7c3550c3f0978e36389b5631b793663"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:379b67d4f49774ba679593b232dcd90d9e10f04d96e3c8ce4a28037ae473f7bb"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c060aea8ad5ef21cdfbbe05475ab5104ce7827b639a78dd55383a6e9895b7c51"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:ebf8f615dde968272d70502c083ebf963b6781aacd3079081e03b32adfe4d58a"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d48098bd8f5c38897b03604f5428901b65e3c97d40b3952e38637b5404b739a2"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da8a4fda219bf9024692b1bc68c9cff4b80507879ada8769dc7e985755d662ea"}, - {file = "ruff-0.3.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c44e0149f1d8b48c4d5c33d88c677a4aa22fd09b1683d6a7ff55b816b5d074f"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3050ec0af72b709a62ecc2aca941b9cd479a7bf2b36cc4562f0033d688e44fa1"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a29cc38e4c1ab00da18a3f6777f8b50099d73326981bb7d182e54a9a21bb4ff7"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b15cc59c19edca917f51b1956637db47e200b0fc5e6e1878233d3a938384b0b"}, - {file = "ruff-0.3.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e491045781b1e38b72c91247cf4634f040f8d0cb3e6d3d64d38dcf43616650b4"}, - {file = "ruff-0.3.7-py3-none-win32.whl", hash = "sha256:bc931de87593d64fad3a22e201e55ad76271f1d5bfc44e1a1887edd0903c7d9f"}, - {file = "ruff-0.3.7-py3-none-win_amd64.whl", hash = "sha256:5ef0e501e1e39f35e03c2acb1d1238c595b8bb36cf7a170e7c1df1b73da00e74"}, - {file = "ruff-0.3.7-py3-none-win_arm64.whl", hash = "sha256:789e144f6dc7019d1f92a812891c645274ed08af6037d11fc65fcbc183b7d59f"}, - {file = "ruff-0.3.7.tar.gz", hash = "sha256:d5c1aebee5162c2226784800ae031f660c350e7a3402c4d1f8ea4e97e232e3ba"}, +files = [ + {file = "ruff-0.9.9-py3-none-linux_armv6l.whl", hash = "sha256:628abb5ea10345e53dff55b167595a159d3e174d6720bf19761f5e467e68d367"}, + {file = "ruff-0.9.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6cd1428e834b35d7493354723543b28cc11dc14d1ce19b685f6e68e07c05ec7"}, + {file = "ruff-0.9.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5ee162652869120ad260670706f3cd36cd3f32b0c651f02b6da142652c54941d"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3aa0f6b75082c9be1ec5a1db78c6d4b02e2375c3068438241dc19c7c306cc61a"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:584cc66e89fb5f80f84b05133dd677a17cdd86901d6479712c96597a3f28e7fe"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abf3369325761a35aba75cd5c55ba1b5eb17d772f12ab168fbfac54be85cf18c"}, + {file = 
"ruff-0.9.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3403a53a32a90ce929aa2f758542aca9234befa133e29f4933dcef28a24317be"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:18454e7fa4e4d72cffe28a37cf6a73cb2594f81ec9f4eca31a0aaa9ccdfb1590"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fadfe2c88724c9617339f62319ed40dcdadadf2888d5afb88bf3adee7b35bfb"}, + {file = "ruff-0.9.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6df104d08c442a1aabcfd254279b8cc1e2cbf41a605aa3e26610ba1ec4acf0b0"}, + {file = "ruff-0.9.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d7c62939daf5b2a15af48abbd23bea1efdd38c312d6e7c4cedf5a24e03207e17"}, + {file = "ruff-0.9.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9494ba82a37a4b81b6a798076e4a3251c13243fc37967e998efe4cce58c8a8d1"}, + {file = "ruff-0.9.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4efd7a96ed6d36ef011ae798bf794c5501a514be369296c672dab7921087fa57"}, + {file = "ruff-0.9.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ab90a7944c5a1296f3ecb08d1cbf8c2da34c7e68114b1271a431a3ad30cb660e"}, + {file = "ruff-0.9.9-py3-none-win32.whl", hash = "sha256:6b4c376d929c25ecd6d87e182a230fa4377b8e5125a4ff52d506ee8c087153c1"}, + {file = "ruff-0.9.9-py3-none-win_amd64.whl", hash = "sha256:837982ea24091d4c1700ddb2f63b7070e5baec508e43b01de013dc7eff974ff1"}, + {file = "ruff-0.9.9-py3-none-win_arm64.whl", hash = "sha256:3ac78f127517209fe6d96ab00f3ba97cafe38718b23b1db3e96d8b2d39e37ddf"}, + {file = "ruff-0.9.9.tar.gz", hash = "sha256:0062ed13f22173e85f8f7056f9a24016e692efeea8704d1a5e8011b8aa850933"}, ] [[package]] name = "semver" -version = "3.0.4" +version = "3.0.2" description = "Python helper for Semantic Versioning (https://semver.org)" optional = false python-versions = ">=3.7" -groups = ["dev"] files = [ - {file = "semver-3.0.4-py3-none-any.whl", hash = "sha256:9c824d87ba7f7ab4a1890799cec8596f15c1241cb473404ea1cb0c55e4b04746"}, - {file = "semver-3.0.4.tar.gz", hash = "sha256:afc7d8c584a5ed0a11033af086e8af226a9c0b206f313e0301f8dd7b6b589602"}, + {file = "semver-3.0.2-py3-none-any.whl", hash = "sha256:b1ea4686fe70b981f85359eda33199d60c53964284e0cfb4977d243e37cf4bf4"}, + {file = "semver-3.0.2.tar.gz", hash = "sha256:6253adb39c70f6e51afed2fa7152bcd414c411286088fb4b9effb133885ab4cc"}, ] [[package]] @@ -635,7 +683,6 @@ version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, @@ -671,13 +718,23 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "unidecode" version = "1.3.8" description = "ASCII transliterations of Unicode text" 
optional = false python-versions = ">=3.5" -groups = ["dev"] files = [ {file = "Unidecode-1.3.8-py3-none-any.whl", hash = "sha256:d130a61ce6696f8148a3bd8fe779c99adeb4b870584eeb9526584e9aa091fd39"}, {file = "Unidecode-1.3.8.tar.gz", hash = "sha256:cfdb349d46ed3873ece4586b96aa75258726e2fa8ec21d6f00a591d98806c2f4"}, @@ -685,32 +742,30 @@ files = [ [[package]] name = "urllib3" -version = "2.3.0" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.9" -groups = ["dev"] +python-versions = ">=3.8" files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" -groups = ["dev"] files = [ - {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, - {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -720,9 +775,9 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [metadata] -lock-version = "2.1" +lock-version = "2.0" python-versions = ">=3.9.1,<3.10" -content-hash = "0a399d239c49db714c1166c20286fdd5cd62faf12e45ab85833c4d6ea7a04a2a" +content-hash = "8e5b1f886eb608198752e1ce84f6bc89d1c8d53a5eeabff84a4272538f81403f" diff --git a/pyproject.toml b/pyproject.toml index f065ca0c39..16a4dda0b6 
100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,10 +20,11 @@ pytest = "^8.0" pytest-print = "^1.0" ayon-python-api = "^1.0" # linting dependencies -ruff = "^0.3.3" -pre-commit = "^3.6.2" +ruff = "^0.9.3" +pre-commit = "^4" codespell = "^2.2.6" semver = "^3.0.2" +mypy = "^1.14.0" mock = "^5.0.0" attrs = "^25.0.0" pyblish-base = "^1.8.7" @@ -68,13 +69,37 @@ exclude = [ line-length = 79 indent-width = 4 + # Assume Python 3.9 target-version = "py39" [tool.ruff.lint] +preview = true +pydocstyle.convention = "google" # Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. -select = ["E", "F", "W"] -ignore = [] +select = ["ALL"] +ignore = [ + "PTH", + "ANN101", # must be set in older version of ruff + "ANN204", + "COM812", + "S603", + "ERA001", + "TRY003", + "UP006", # support for older python version (type vs. Type) + "UP007", # ..^ + "UP035", # .. + "UP045", # Use `X | None` for type annotations + "ARG002", + "INP001", # add `__init__.py` to namespaced package + "FIX002", # FIX all TODOs + "TD003", # missing issue link + "S404", # subprocess module is possibly insecure + "PLC0415", # import must be on top of the file + "CPY001", # missing copyright header + "UP045" + +] # Allow fix for all enabled rules (when `--fix`) is provided. fixable = ["ALL"] @@ -90,6 +115,7 @@ exclude = [ [tool.ruff.lint.per-file-ignores] "client/ayon_core/lib/__init__.py" = ["E402"] +"tests/*.py" = ["S101", "PLR2004"] # allow asserts and magical values [tool.ruff.format] # Like Black, use double quotes for strings. @@ -116,6 +142,9 @@ skip = "./.*,./package/*,*/vendor/*,*/unreal/integration/*,*/aftereffects/api/ex count = true quiet-level = 3 +[tool.mypy] +mypy_path = "$MYPY_CONFIG_FILE_DIR/client" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000..d420712d8b --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests.""" diff --git a/tests/client/ayon_core/pipeline/traits/__init__.py b/tests/client/ayon_core/pipeline/traits/__init__.py new file mode 100644 index 0000000000..ead0593ced --- /dev/null +++ b/tests/client/ayon_core/pipeline/traits/__init__.py @@ -0,0 +1 @@ +"""Tests for the representation traits.""" diff --git a/tests/client/ayon_core/pipeline/traits/lib/__init__.py b/tests/client/ayon_core/pipeline/traits/lib/__init__.py new file mode 100644 index 0000000000..d7ea7ae0ad --- /dev/null +++ b/tests/client/ayon_core/pipeline/traits/lib/__init__.py @@ -0,0 +1,25 @@ +"""Metadata traits.""" +from typing import ClassVar + +from ayon_core.pipeline.traits import TraitBase + + +class NewTestTrait(TraitBase): + """New Test trait model. + + This model represents a tagged trait. + + Attributes: + name (str): Trait name. + description (str): Trait description. + id (str): id should be namespaced trait name with version + """ + + name: ClassVar[str] = "New Test Trait" + description: ClassVar[str] = ( + "This test trait is used for testing updating." 
+ ) + id: ClassVar[str] = "ayon.test.NewTestTrait.v999" + + +__all__ = ["NewTestTrait"] diff --git a/tests/client/ayon_core/pipeline/traits/test_content_traits.py b/tests/client/ayon_core/pipeline/traits/test_content_traits.py new file mode 100644 index 0000000000..4aa149b8ee --- /dev/null +++ b/tests/client/ayon_core/pipeline/traits/test_content_traits.py @@ -0,0 +1,184 @@ +"""Tests for the content traits.""" +from __future__ import annotations + +import re +from pathlib import Path + +import pytest +from ayon_core.pipeline.traits import ( + Bundle, + FileLocation, + FileLocations, + FrameRanged, + Image, + MimeType, + PixelBased, + Planar, + Representation, + Sequence, +) +from ayon_core.pipeline.traits.trait import TraitValidationError + + +def test_bundles() -> None: + """Test bundle trait.""" + diffuse_texture = [ + Image(), + PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + Planar(planar_configuration="RGB"), + FileLocation( + file_path=Path("/path/to/diffuse.jpg"), + file_size=1024, + file_hash=None), + MimeType(mime_type="image/jpeg"), + ] + bump_texture = [ + Image(), + PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + Planar(planar_configuration="RGB"), + FileLocation( + file_path=Path("/path/to/bump.tif"), + file_size=1024, + file_hash=None), + MimeType(mime_type="image/tiff"), + ] + bundle = Bundle(items=[diffuse_texture, bump_texture]) + representation = Representation(name="test_bundle", traits=[bundle]) + + if representation.contains_trait(trait=Bundle): + assert representation.get_trait(trait=Bundle).items == [ + diffuse_texture, bump_texture + ] + + for item in representation.get_trait(trait=Bundle).items: + sub_representation = Representation(name="test", traits=item) + assert sub_representation.contains_trait(trait=Image) + sub: MimeType = sub_representation.get_trait(trait=MimeType) + assert sub.mime_type in { + "image/jpeg", "image/tiff" + } + + +def test_file_locations_validation() -> None: + """Test FileLocations trait validation.""" + file_locations_list = [ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1051) + ] + + representation = Representation(name="test", traits=[ + FileLocations(file_paths=file_locations_list), + Sequence(frame_padding=4), + ]) + + file_locations_trait: FileLocations = FileLocations( + file_paths=file_locations_list) + + # this should be valid trait + file_locations_trait.validate_trait(representation) + + # add valid FrameRanged trait + frameranged_trait = FrameRanged( + frame_start=1001, + frame_end=1050, + frames_per_second="25" + ) + representation.add_trait(frameranged_trait) + + # it should still validate fine + file_locations_trait.validate_trait(representation) + + # create empty file locations trait + empty_file_locations_trait = FileLocations(file_paths=[]) + representation = Representation(name="test", traits=[ + empty_file_locations_trait + ]) + with pytest.raises(TraitValidationError): + empty_file_locations_trait.validate_trait(representation) + + # create valid file locations trait but with not matching + # frame range trait + representation = Representation(name="test", traits=[ + FileLocations(file_paths=file_locations_list), + Sequence(frame_padding=4), + ]) + invalid_sequence_trait = FrameRanged( + frame_start=1001, + frame_end=1051, + frames_per_second="25" + ) + + representation.add_trait(invalid_sequence_trait) + with 
pytest.raises(TraitValidationError): + file_locations_trait.validate_trait(representation) + + # invalid representation with multiple file locations but + # unrelated to either Sequence or Bundle traits + representation = Representation(name="test", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path("/path/to/file_foo.exr"), + file_size=1024, + file_hash=None, + ), + FileLocation( + file_path=Path("/path/to/anotherfile.obj"), + file_size=1234, + file_hash=None, + ) + ]) + ]) + + with pytest.raises(TraitValidationError): + representation.validate() + + +def test_get_file_location_from_frame() -> None: + """Test get_file_location_from_frame method.""" + file_locations_list = [ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1051) + ] + + file_locations_trait: FileLocations = FileLocations( + file_paths=file_locations_list) + + assert file_locations_trait.get_file_location_for_frame(frame=1001) == \ + file_locations_list[0] + assert file_locations_trait.get_file_location_for_frame(frame=1050) == \ + file_locations_list[-1] + assert file_locations_trait.get_file_location_for_frame(frame=1100) is None + + # test with custom regex + sequence = Sequence( + frame_padding=4, + frame_regex=re.compile(r"boo_(?P<index>(?P<padding>0*)\d+)\.exr")) + file_locations_list = [ + FileLocation( + file_path=Path(f"/path/to/boo_{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1051) + ] + + file_locations_trait = FileLocations( + file_paths=file_locations_list) + + assert file_locations_trait.get_file_location_for_frame( + frame=1001, sequence_trait=sequence) == \ + file_locations_list[0] diff --git a/tests/client/ayon_core/pipeline/traits/test_time_traits.py b/tests/client/ayon_core/pipeline/traits/test_time_traits.py new file mode 100644 index 0000000000..28ace89910 --- /dev/null +++ b/tests/client/ayon_core/pipeline/traits/test_time_traits.py @@ -0,0 +1,248 @@ +"""Tests for the time related traits.""" +from __future__ import annotations + +import re +from pathlib import Path + +import pytest +from ayon_core.pipeline.traits import ( + FileLocation, + FileLocations, + FrameRanged, + Handles, + Representation, + Sequence, +) +from ayon_core.pipeline.traits.trait import TraitValidationError + + +def test_sequence_validations() -> None: + """Test Sequence trait validation.""" + file_locations_list = [ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1010 + 1) # +1 because range end is exclusive + ] + + file_locations_list += [ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1015, 1020 + 1) + ] + + file_locations_list += [ + FileLocation + ( + file_path=Path("/path/to/file.1100.exr"), + file_size=1024, + file_hash=None, + ) + ] + + representation = Representation(name="test_1", traits=[ + FileLocations(file_paths=file_locations_list), + FrameRanged( + frame_start=1001, + frame_end=1100, frames_per_second="25"), + Sequence( + frame_padding=4, + frame_spec="1001-1010,1015-1020,1100") + ]) + + representation.get_trait(Sequence).validate_trait(representation) + + # here we set handles and set them as inclusive, so this should pass + representation = Representation(name="test_2", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1100 +
1) # +1 because range end is exclusive + ]), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=True + ), + FrameRanged( + frame_start=1001, + frame_end=1100, frames_per_second="25"), + Sequence(frame_padding=4) + ]) + + representation.validate() + + # do the same but set handles as exclusive + representation = Representation(name="test_3", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(996, 1105 + 1) # +1 because range end is exclusive + ]), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=False + ), + FrameRanged( + frame_start=1001, + frame_end=1100, frames_per_second="25"), + Sequence(frame_padding=4) + ]) + + representation.validate() + + # invalid representation with file range not extended for handles + representation = Representation(name="test_4", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1050 + 1) # +1 because range end is exclusive + ]), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=False + ), + FrameRanged( + frame_start=1001, + frame_end=1050, frames_per_second="25"), + Sequence(frame_padding=4) + ]) + + with pytest.raises(TraitValidationError): + representation.validate() + + # invalid representation with frame spec not matching the files + del representation + representation = Representation(name="test_5", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1050 + 1) # +1 because range end is exclusive + ]), + FrameRanged( + frame_start=1001, + frame_end=1050, frames_per_second="25"), + Sequence(frame_padding=4, frame_spec="1001-1010,1012-2000") + ]) + with pytest.raises(TraitValidationError): + representation.validate() + + representation = Representation(name="test_6", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1050 + 1) # +1 because range end is exclusive + ]), + Sequence(frame_padding=4, frame_spec="1-1010,1012-1050"), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=False + ) + ]) + with pytest.raises(TraitValidationError): + representation.validate() + + representation = Representation(name="test_6", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(996, 1050 + 1) # +1 because range end is exclusive + ]), + Sequence(frame_padding=4, frame_spec="1001-1010,1012-2000"), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=False + ) + ]) + with pytest.raises(TraitValidationError): + representation.validate() + + representation = Representation(name="test_7", traits=[ + FileLocations(file_paths=[ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(996, 1050 + 1) # +1 because range end is exclusive + ]), + Sequence( + frame_padding=4, + frame_regex=re.compile( + r"img\.(?P<index>(?P<padding>0*)\d{4})\.png$")), + Handles( + frame_start_handle=5, + frame_end_handle=5, + inclusive=False + ) + ]) + representation.validate() + + +def test_list_spec_to_frames() -> None: + """Test converting list specification to frames.""" + assert Sequence.list_spec_to_frames("1-10,20-30,55") == [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10, + 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 55 + ] + assert Sequence.list_spec_to_frames("1,2,3,4,5") == [ + 1, 2, 3, 4, 5 + ] + assert Sequence.list_spec_to_frames("1-10") == [ + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 + ] + test_list = list(range(1001, 1011)) + test_list += list(range(1012, 2001)) + assert Sequence.list_spec_to_frames("1001-1010,1012-2000") == test_list + + assert Sequence.list_spec_to_frames("1") == [1] + with pytest.raises( + ValueError, + match=r"Invalid frame number in the list: .*"): + Sequence.list_spec_to_frames("a") + + +def test_sequence_get_frame_padding() -> None: + """Test getting frame padding from FileLocations trait.""" + file_locations_list = [ + FileLocation( + file_path=Path(f"/path/to/file.{frame}.exr"), + file_size=1024, + file_hash=None, + ) + for frame in range(1001, 1051) + ] + + representation = Representation(name="test", traits=[ + FileLocations(file_paths=file_locations_list) + ]) + + assert Sequence.get_frame_padding( + file_locations=representation.get_trait(FileLocations)) == 4 diff --git a/tests/client/ayon_core/pipeline/traits/test_traits.py b/tests/client/ayon_core/pipeline/traits/test_traits.py new file mode 100644 index 0000000000..b990c074d3 --- /dev/null +++ b/tests/client/ayon_core/pipeline/traits/test_traits.py @@ -0,0 +1,380 @@ +"""Tests for the representation traits.""" +from __future__ import annotations + +from pathlib import Path + +import pytest +from ayon_core.pipeline.traits import ( + Bundle, + FileLocation, + Image, + MimeType, + Overscan, + PixelBased, + Planar, + Representation, + TraitBase, +) + +REPRESENTATION_DATA: dict = { + FileLocation.id: { + "file_path": Path("/path/to/file"), + "file_size": 1024, + "file_hash": None, + "persistent": True, + }, + Image.id: {"persistent": True}, + PixelBased.id: { + "display_window_width": 1920, + "display_window_height": 1080, + "pixel_aspect_ratio": 1.0, + "persistent": True, + }, + Planar.id: { + "planar_configuration": "RGB", + "persistent": True, + }, + } + + +class UpgradedImage(Image): + """Upgraded image class.""" + id = "ayon.2d.Image.v2" + + @classmethod + def upgrade(cls, data: dict) -> UpgradedImage: # noqa: ARG003 + """Upgrade the trait. + + Returns: + UpgradedImage: Upgraded image instance. 
+ + """ + return cls() + + +class InvalidTrait: + """Invalid trait class.""" + foo = "bar" + + +@pytest.fixture +def representation() -> Representation: + """Return a traits data instance.""" + return Representation(name="test", traits=[ + FileLocation(**REPRESENTATION_DATA[FileLocation.id]), + Image(), + PixelBased(**REPRESENTATION_DATA[PixelBased.id]), + Planar(**REPRESENTATION_DATA[Planar.id]), + ]) + + +def test_representation_errors(representation: Representation) -> None: + """Test errors in representation.""" + with pytest.raises(ValueError, + match=r"Invalid trait .* - ID is required."): + representation.add_trait(InvalidTrait()) + + with pytest.raises(ValueError, + match=f"Trait with ID {Image.id} already exists."): + representation.add_trait(Image()) + + with pytest.raises(ValueError, + match=r"Trait with ID .* not found."): + representation.remove_trait_by_id("foo") + + +def test_representation_traits(representation: Representation) -> None: + """Test setting and getting traits.""" + assert representation.get_trait_by_id( + "ayon.2d.PixelBased").get_version() == 1 + + assert len(representation) == len(REPRESENTATION_DATA) + assert representation.get_trait_by_id(FileLocation.id) + assert representation.get_trait_by_id(Image.id) + assert representation.get_trait_by_id(trait_id="ayon.2d.Image.v1") + assert representation.get_trait_by_id(PixelBased.id) + assert representation.get_trait_by_id(trait_id="ayon.2d.PixelBased.v1") + assert representation.get_trait_by_id(Planar.id) + assert representation.get_trait_by_id(trait_id="ayon.2d.Planar.v1") + + assert representation.get_trait(FileLocation) + assert representation.get_trait(Image) + assert representation.get_trait(PixelBased) + assert representation.get_trait(Planar) + + assert issubclass( + type(representation.get_trait(FileLocation)), TraitBase) + + assert representation.get_trait(FileLocation) == \ + representation.get_trait_by_id(FileLocation.id) + assert representation.get_trait(Image) == \ + representation.get_trait_by_id(Image.id) + assert representation.get_trait(PixelBased) == \ + representation.get_trait_by_id(PixelBased.id) + assert representation.get_trait(Planar) == \ + representation.get_trait_by_id(Planar.id) + + assert representation.get_trait_by_id( + "ayon.2d.PixelBased.v1").display_window_width == \ + REPRESENTATION_DATA[PixelBased.id]["display_window_width"] + assert representation.get_trait( + trait=PixelBased).display_window_height == \ + REPRESENTATION_DATA[PixelBased.id]["display_window_height"] + + repre_dict = { + FileLocation.id: FileLocation(**REPRESENTATION_DATA[FileLocation.id]), + Image.id: Image(), + PixelBased.id: PixelBased(**REPRESENTATION_DATA[PixelBased.id]), + Planar.id: Planar(**REPRESENTATION_DATA[Planar.id]), + } + assert representation.get_traits() == repre_dict + + assert representation.get_traits_by_ids( + trait_ids=[FileLocation.id, Image.id, PixelBased.id, Planar.id]) == \ + repre_dict + assert representation.get_traits( + [FileLocation, Image, PixelBased, Planar]) == \ + repre_dict + + assert representation.has_traits() is True + empty_representation: Representation = Representation( + name="test", traits=[]) + assert empty_representation.has_traits() is False + + assert representation.contains_trait(trait=FileLocation) is True + assert representation.contains_traits([Image, FileLocation]) is True + assert representation.contains_trait_by_id(FileLocation.id) is True + assert representation.contains_traits_by_id( + trait_ids=[FileLocation.id, Image.id]) is True + + assert 
representation.contains_trait(trait=Bundle) is False + assert representation.contains_traits([Image, Bundle]) is False + assert representation.contains_trait_by_id(Bundle.id) is False + assert representation.contains_traits_by_id( + trait_ids=[FileLocation.id, Bundle.id]) is False + + +def test_trait_removing(representation: Representation) -> None: + """Test removing traits.""" + assert representation.contains_trait_by_id("nonexistent") is False + with pytest.raises( + ValueError, match=r"Trait with ID nonexistent not found."): + representation.remove_trait_by_id("nonexistent") + + assert representation.contains_trait(trait=FileLocation) is True + representation.remove_trait(trait=FileLocation) + assert representation.contains_trait(trait=FileLocation) is False + + assert representation.contains_trait_by_id(Image.id) is True + representation.remove_trait_by_id(Image.id) + assert representation.contains_trait_by_id(Image.id) is False + + assert representation.contains_traits([PixelBased, Planar]) is True + representation.remove_traits([Planar, PixelBased]) + assert representation.contains_traits([PixelBased, Planar]) is False + + assert representation.has_traits() is False + + with pytest.raises( + ValueError, match=f"Trait with ID {Image.id} not found."): + representation.remove_trait(Image) + + +def test_representation_dict_properties( + representation: Representation) -> None: + """Test representation as dictionary.""" + representation = Representation(name="test") + representation[Image.id] = Image() + assert Image.id in representation + image = representation[Image.id] + assert image == Image() + for trait_id, trait in representation.items(): + assert trait_id == Image.id + assert trait == Image() + + +def test_getting_traits_data(representation: Representation) -> None: + """Test getting a batch of traits.""" + result = representation.get_traits_by_ids( + trait_ids=[FileLocation.id, Image.id, PixelBased.id, Planar.id]) + assert result == { + "ayon.2d.Image.v1": Image(), + "ayon.2d.PixelBased.v1": PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + "ayon.2d.Planar.v1": Planar(planar_configuration="RGB"), + "ayon.content.FileLocation.v1": FileLocation( + file_path=Path("/path/to/file"), + file_size=1024, + file_hash=None) + } + + +def test_traits_data_to_dict(representation: Representation) -> None: + """Test converting traits data to dictionary.""" + result = representation.traits_as_dict() + assert result == REPRESENTATION_DATA + + +def test_get_version_from_id() -> None: + """Test getting version from trait ID.""" + assert Image().get_version() == 1 + + class TestOverscan(Overscan): + id = "ayon.2d.Overscan.v2" + + assert TestOverscan( + left=0, + right=0, + top=0, + bottom=0 + ).get_version() == 2 + + class TestMimeType(MimeType): + id = "ayon.content.MimeType" + + assert TestMimeType(mime_type="foo/bar").get_version() is None + + +def test_get_versionless_id() -> None: + """Test getting versionless trait ID.""" + assert Image().get_versionless_id() == "ayon.2d.Image" + + class TestOverscan(Overscan): + id = "ayon.2d.Overscan.v2" + + assert TestOverscan( + left=0, + right=0, + top=0, + bottom=0 + ).get_versionless_id() == "ayon.2d.Overscan" + + class TestMimeType(MimeType): + id = "ayon.content.MimeType" + + assert TestMimeType(mime_type="foo/bar").get_versionless_id() == \ + "ayon.content.MimeType" + + +def test_from_dict() -> None: + """Test creating representation from dictionary.""" + traits_data = { + 
"ayon.content.FileLocation.v1": { + "file_path": "/path/to/file", + "file_size": 1024, + "file_hash": None, + }, + "ayon.2d.Image.v1": {}, + } + + representation = Representation.from_dict( + "test", trait_data=traits_data) + + assert len(representation) == 2 + assert representation.get_trait_by_id("ayon.content.FileLocation.v1") + assert representation.get_trait_by_id("ayon.2d.Image.v1") + + traits_data = { + "ayon.content.FileLocation.v999": { + "file_path": "/path/to/file", + "file_size": 1024, + "file_hash": None, + }, + } + + with pytest.raises(ValueError, match=r"Trait model with ID .* not found."): + representation = Representation.from_dict( + "test", trait_data=traits_data) + + traits_data = { + "ayon.content.FileLocation": { + "file_path": "/path/to/file", + "file_size": 1024, + "file_hash": None, + }, + } + + representation = Representation.from_dict( + "test", trait_data=traits_data) + + assert len(representation) == 1 + assert representation.get_trait_by_id("ayon.content.FileLocation.v1") + + # this won't work right now because we would need to somewhat mock + # the import + """ + from .lib import NewTestTrait + + traits_data = { + "ayon.test.NewTestTrait.v1": {}, + } + + representation = Representation.from_dict( + "test", trait_data=traits_data) + """ + + +def test_representation_equality() -> None: + """Test representation equality.""" + # rep_a and rep_b are equal + rep_a = Representation(name="test", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Image(), + PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + Planar(planar_configuration="RGB"), + ]) + rep_b = Representation(name="test", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Image(), + PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + Planar(planar_configuration="RGB"), + ]) + + # rep_c has different value for planar_configuration then rep_a and rep_b + rep_c = Representation(name="test", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Image(), + PixelBased( + display_window_width=1920, + display_window_height=1080, + pixel_aspect_ratio=1.0), + Planar(planar_configuration="RGBA"), + ]) + + rep_d = Representation(name="test", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Image(), + ]) + rep_e = Representation(name="foo", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Image(), + ]) + rep_f = Representation(name="foo", traits=[ + FileLocation(file_path=Path("/path/to/file"), file_size=1024), + Planar(planar_configuration="RGBA"), + ]) + + # lets assume ids are the same (because ids are randomly generated) + rep_b.representation_id = rep_d.representation_id = rep_a.representation_id + rep_c.representation_id = rep_e.representation_id = rep_a.representation_id + rep_f.representation_id = rep_a.representation_id + assert rep_a == rep_b + + # because of the trait value difference + assert rep_a != rep_c + # because of the type difference + assert rep_a != "foo" + # because of the trait count difference + assert rep_a != rep_d + # because of the name difference + assert rep_d != rep_e + # because of the trait difference + assert rep_d != rep_f diff --git a/tests/client/ayon_core/pipeline/traits/test_two_dimesional_traits.py b/tests/client/ayon_core/pipeline/traits/test_two_dimesional_traits.py new file mode 100644 index 0000000000..f09d2b0864 --- /dev/null +++ 
b/tests/client/ayon_core/pipeline/traits/test_two_dimesional_traits.py @@ -0,0 +1,63 @@ +"""Tests for the 2d related traits.""" +from __future__ import annotations + +from pathlib import Path + +from ayon_core.pipeline.traits import ( + UDIM, + FileLocation, + FileLocations, + Representation, +) + + +def test_get_file_location_for_udim() -> None: + """Test get_file_location_for_udim.""" + file_locations_list = [ + FileLocation( + file_path=Path("/path/to/file.1001.exr"), + file_size=1024, + file_hash=None, + ), + FileLocation( + file_path=Path("/path/to/file.1002.exr"), + file_size=1024, + file_hash=None, + ), + FileLocation( + file_path=Path("/path/to/file.1003.exr"), + file_size=1024, + file_hash=None, + ), + ] + + representation = Representation(name="test_1", traits=[ + FileLocations(file_paths=file_locations_list), + UDIM(udim=[1001, 1002, 1003]), + ]) + + udim_trait = representation.get_trait(UDIM) + assert udim_trait.get_file_location_for_udim( + file_locations=representation.get_trait(FileLocations), + udim=1001 + ) == file_locations_list[0] + + +def test_get_udim_from_file_location() -> None: + """Test get_udim_from_file_location.""" + file_location_1 = FileLocation( + file_path=Path("/path/to/file.1001.exr"), + file_size=1024, + file_hash=None, + ) + + file_location_2 = FileLocation( + file_path=Path("/path/to/file.xxxxx.exr"), + file_size=1024, + file_hash=None, + ) + assert UDIM(udim=[1001]).get_udim_from_file_location( + file_location_1) == 1001 + + assert UDIM(udim=[1001]).get_udim_from_file_location( + file_location_2) is None diff --git a/tests/conftest.py b/tests/conftest.py index a3c46a9dd7..33c29d13f0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,3 +1,4 @@ +"""conftest.py: pytest configuration file.""" import sys from pathlib import Path @@ -5,5 +6,3 @@ # add client path to sys.path sys.path.append(str(client_path)) - -print(f"Added {client_path} to sys.path")
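
Reviewer note: below is a minimal usage sketch (not part of the patch) of the
traits API exercised by the tests above. Class names, constructor arguments
and method calls are taken from the tests; the representation name
"review_example" is only an illustrative placeholder.

    from pathlib import Path

    from ayon_core.pipeline.traits import (
        FileLocation,
        Image,
        PixelBased,
        Planar,
        Representation,
    )

    # Build a representation the same way the fixtures in test_traits.py do.
    representation = Representation(name="review_example", traits=[
        FileLocation(file_path=Path("/path/to/file"), file_size=1024),
        Image(),
        PixelBased(
            display_window_width=1920,
            display_window_height=1080,
            pixel_aspect_ratio=1.0),
        Planar(planar_configuration="RGB"),
    ])

    # Traits can be queried either by class or by versioned ID.
    assert representation.contains_trait(trait=PixelBased)
    pixel_based = representation.get_trait_by_id("ayon.2d.PixelBased.v1")
    assert pixel_based.display_window_width == 1920

    # Rebuild a representation from plain dictionaries, mirroring
    # test_from_dict() above.
    restored = Representation.from_dict("review_example", trait_data={
        "ayon.content.FileLocation.v1": {
            "file_path": "/path/to/file",
            "file_size": 1024,
            "file_hash": None,
        },
        "ayon.2d.Image.v1": {},
    })
    assert len(restored) == 2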