From 99424a4a1d15e111801bf69f56dce56ed27d2673 Mon Sep 17 00:00:00 2001 From: davidemarcoli Date: Fri, 22 Nov 2024 18:11:16 +0100 Subject: [PATCH 01/12] feat: enable TorBoxDownloader and refactor stream processing in Downloader --- src/program/services/downloaders/__init__.py | 54 ++-- src/program/services/downloaders/torbox.py | 261 +++++++++++++++++++ 2 files changed, 300 insertions(+), 15 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index c324bb45..c47479f6 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -8,7 +8,7 @@ from .alldebrid import AllDebridDownloader from .realdebrid import RealDebridDownloader -# from .torbox import TorBoxDownloader +from .torbox import TorBoxDownloader class InvalidFileSizeException(Exception): pass @@ -30,7 +30,7 @@ def __init__(self): self.services = { RealDebridDownloader: RealDebridDownloader(), AllDebridDownloader: AllDebridDownloader(), - # TorBoxDownloader: TorBoxDownloader() + TorBoxDownloader: TorBoxDownloader() } self.service = next( (service for service in self.services.values() if service.initialized), None @@ -48,25 +48,49 @@ def validate(self): def run(self, item: MediaItem): logger.debug(f"Running downloader for {item.log_string}") - for stream in item.streams: - download_result = None - try: - download_result = self.download_cached_stream(item, stream) - if download_result: - self.validate_filesize(item, download_result) + # for stream in item.streams: + # download_result = None + # try: + # download_result = self.download_cached_stream(item, stream) + # if download_result: + # self.validate_filesize(item, download_result) + # if not self.update_item_attributes(item, download_result): + # raise Exception("No matching files found!") + # break + # except Exception as e: + # if download_result and download_result.torrent_id: + # self.service.delete_torrent(download_result.torrent_id) + # logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") + # item.blacklist_stream(stream) + + # Chunk streams into groups of 10 + chunk_size = 10 + for i in range(0, len(item.streams), chunk_size): + logger.debug(f"Processing chunk {i} to {i + chunk_size}") + chunk = item.streams[i:i + chunk_size] + instant_availability = self.get_instant_availability([stream.infohash for stream in chunk]) + # Filter out streams that aren't cached + available_streams = [stream for stream in chunk if instant_availability.get(stream.infohash, None)] + if not available_streams: + continue + for stream in available_streams: + download_result = None + try: + download_result = self.download_cached_stream(item, stream, instant_availability[stream.infohash]) + if download_result: + self.validate_filesize(item, download_result) if not self.update_item_attributes(item, download_result): raise Exception("No matching files found!") break - except Exception as e: - if download_result and download_result.torrent_id: - self.service.delete_torrent(download_result.torrent_id) - logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") - item.blacklist_stream(stream) + except Exception as e: + if download_result and download_result.torrent_id: + self.service.delete_torrent(download_result.torrent_id) + logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") + item.blacklist_stream(stream) yield item - def download_cached_stream(self, item: MediaItem, stream: Stream) -> DownloadCachedStreamResult: - cached_containers = 
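#
# A minimal sketch of the batching idea this patch introduces in run():
# slice the stream list into fixed-size chunks and make one availability
# call per chunk instead of one per stream. Names here are illustrative,
# not the exact Riven API:
#
#     def chunked(streams: list, size: int = 10):
#         """Yield successive fixed-size slices of a list."""
#         for i in range(0, len(streams), size):
#             yield streams[i:i + size]
#
#     # one availability lookup per chunk of 10 infohashes
#     for chunk in chunked(streams):
#         cached = get_instant_availability([s.infohash for s in chunk])
#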
self.get_instant_availability([stream.infohash]).get(stream.infohash, None)
+    def download_cached_stream(self, item: MediaItem, stream: Stream, cached_containers: list[dict]) -> DownloadCachedStreamResult:
         if not cached_containers:
             raise Exception("Not cached!")
         the_container = cached_containers[0]
diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py
index 54bd9a5f..6cf12308 100644
--- a/src/program/services/downloaders/torbox.py
+++ b/src/program/services/downloaders/torbox.py
@@ -332,3 +332,264 @@
 #             response_type=dict,
 #         )
 #         return response.data["data"]
+
+import time
+from datetime import datetime
+from enum import Enum
+from typing import Dict, List, Optional, Union
+
+from loguru import logger
+from pydantic import BaseModel
+from requests import Session
+
+from program.settings.manager import settings_manager
+from program.utils.request import (
+    BaseRequestHandler,
+    HttpMethod,
+    ResponseType,
+    create_service_session,
+    get_rate_limit_params,
+)
+
+from .shared import VIDEO_EXTENSIONS, DownloaderBase, FileFinder, premium_days_left
+
+
+class TBTorrentStatus(str, Enum):
+    """TorBox torrent status enumeration"""
+    MAGNET_ERROR = "magnet_error"
+    MAGNET_CONVERSION = "magnet_conversion"
+    WAITING_FILES = "waiting_files_selection"
+    DOWNLOADING = "downloading"
+    DOWNLOADED = "downloaded"
+    ERROR = "error"
+    SEEDING = "seeding"
+    DEAD = "dead"
+    UPLOADING = "uploading"
+    COMPRESSING = "compressing"
+
+class TBTorrent(BaseModel):
+    """TorBox torrent model"""
+    id: str
+    hash: str
+    filename: str
+    bytes: int
+    status: TBTorrentStatus
+    added: datetime
+    links: List[str]
+    ended: Optional[datetime] = None
+    speed: Optional[int] = None
+    seeders: Optional[int] = None
+
+class TorBoxError(Exception):
+    """Base exception for TorBox related errors"""
+
+class TorBoxRequestHandler(BaseRequestHandler):
+    def __init__(self, session: Session, base_url: str, request_logging: bool = False):
+        super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, custom_exception=TorBoxError, request_logging=request_logging)
+
+    def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> Union[dict, list]:
+        response = super()._request(method, endpoint, **kwargs)
+        if response.status_code == 204:
+            return {}
+        if not response.data and not response.is_ok:
+            raise TorBoxError("Invalid JSON response from TorBox")
+        return response.data
+
+class TorBoxAPI:
+    """Handles TorBox API communication"""
+    BASE_URL = "https://api.torbox.app/v1/api"
+
+    def __init__(self, api_key: str, proxy_url: Optional[str] = None):
+        self.api_key = api_key
+        rate_limit_params = get_rate_limit_params(per_second=5)
+        self.session = create_service_session(rate_limit_params=rate_limit_params)
+        self.session.headers.update({"Authorization": f"Bearer {api_key}"})
+        if proxy_url:
+            self.session.proxies = {"http": proxy_url, "https": proxy_url}
+        self.request_handler = TorBoxRequestHandler(self.session, self.BASE_URL)
+
+class TorBoxDownloader(DownloaderBase):
+    """Main TorBox downloader class implementing DownloaderBase"""
+    MAX_RETRIES = 3
+    RETRY_DELAY = 1.0
+
+    def __init__(self):
+        self.key = "torbox"
+        self.settings = settings_manager.settings.downloaders.torbox
+        self.api = None
+        self.file_finder = None
+        self.initialized = self.validate()
+
+    def validate(self) -> bool:
+        """
+        Validate TorBox settings and premium status
+        Required by DownloaderBase
+        """
+        if not self._validate_settings():
+            return False
+
+        self.api = TorBoxAPI(
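# TorBoxAPI above folds auth and throttling into one requests.Session. A rough
# standalone equivalent, using only the helpers imported in this diff
# (get_rate_limit_params, create_service_session) and treating the 5 req/s
# figure as this constructor's choice rather than a documented TorBox limit:
#
#     params = get_rate_limit_params(per_second=5)
#     session = create_service_session(rate_limit_params=params)
#     session.headers.update({"Authorization": f"Bearer {api_key}"})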
api_key=self.settings.api_key, + # proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None + ) + self.file_finder = FileFinder("short_name", "size") + + return self._validate_premium() + + def _validate_settings(self) -> bool: + """Validate configuration settings""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.warning("TorBox API key is not set") + return False + # if self.settings.proxy_enabled and not self.settings.proxy_url: + # logger.error("Proxy is enabled but no proxy URL is provided") + # return False + return True + + def _validate_premium(self) -> bool: + """Validate premium status""" + try: + response = self.api.request_handler.execute(HttpMethod.GET, "user/me") + user_info = response["data"] + if not user_info.get("plan") or user_info["plan"] == 0: + logger.error("Premium membership required") + return False + + expiration = datetime.fromisoformat( + user_info["premium_expires_at"] + ).replace(tzinfo=None) + logger.info(premium_days_left(expiration)) + return True + except Exception as e: + logger.error(f"Failed to validate premium status: {e}") + return False + + # TODO + def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: + """ + Get instant availability for multiple infohashes with retry logic + Required by DownloaderBase + """ + + if len(infohashes) == 0: + return {} + + for attempt in range(self.MAX_RETRIES): + try: + response = self.api.request_handler.execute( + HttpMethod.GET, + f"torrents/checkcached?hash={','.join(infohashes)}&format=list&list_files=true" + ) + + data = response.get("data") + + if not data: + return {} + + # Return early if data is not a dict + if not isinstance(data, list): + logger.warning(f"Invalid instant availability data from TorBox, expected list, got {type(data)}") + return {} + + return { + entry['hash']: [{i: file for i, file in enumerate(entry['files'])}] + #entry['hash']: [{"1": entry['files']}] + for entry in data + if self._contains_valid_video_files(entry['files']) + # if isinstance(entry, dict) + } + + except Exception as e: + logger.debug(f"Failed to get instant availability (attempt {attempt + 1}/{self.MAX_RETRIES}): {e}") + if attempt < self.MAX_RETRIES - 1: + time.sleep(self.RETRY_DELAY) + continue + + logger.debug("All retry attempts failed for instant availability") + return {} + + # def _filter_valid_containers(self, containers: List[dict]) -> List[dict]: + # """Filter and sort valid video containers""" + # valid_containers = [ + # container for container in containers + # if self._contains_valid_video_files(container) + # ] + # return sorted(valid_containers, key=len, reverse=True) + + def _contains_valid_video_files(self, container: dict) -> bool: + """Check if container has valid video files""" + return all( + any( + file["name"].endswith(ext) and "sample" not in file["name"].lower() + for ext in VIDEO_EXTENSIONS + ) + for file in container + ) + + def add_torrent(self, infohash: str) -> str: + """ + Add a torrent by infohash + Required by DownloaderBase + """ + if not self.initialized: + raise TorBoxError("Downloader not properly initialized") + + try: + magnet = f"magnet:?xt=urn:btih:{infohash}" + response = self.api.request_handler.execute( + HttpMethod.POST, + "torrents/createtorrent", + data={"magnet": magnet.lower()} + ) + return response["data"]["torrent_id"] + except Exception as e: + logger.error(f"Failed to add torrent {infohash}: {e}") + raise + + # TODO + def select_files(self, torrent_id: str, files: List[str]): + """ + Select 
files from a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise TorBoxError("Downloader not properly initialized") + + # I think that's not required for TorBox + + # TODO + def get_torrent_info(self, torrent_id: str) -> dict: + """ + Get information about a torrent + Required by DownloaderBase + """ + if not self.initialized: + raise TorBoxError("Downloader not properly initialized") + + # Does TorBox have a method to get torrent info? + + # try: + # return self.api.request_handler.execute(HttpMethod.GET, f"torrents/torrentinfo/{torrent_id}")['data'] + # except Exception as e: + # logger.error(f"Failed to get torrent info for {torrent_id}: {e}") + # raise + + # TODO + def delete_torrent(self, torrent_id: str): + """ + Delete a torrent + Required by DownloaderBase + """ + + if not self.initialized: + raise TorBoxError("Downloader not properly initialized") + + logger.debug(f"Deleting torrent {torrent_id}") + + try: + self.api.request_handler.execute(HttpMethod.POST, f"torrents/controltorrent", data={"torrent_id": torrent_id, "operation": "delete"}) + except Exception as e: + logger.error(f"Failed to delete torrent {torrent_id}: {e}") + raise \ No newline at end of file From 70f651b295dfef8fd36a47ebc89dfd9d7e47b7e4 Mon Sep 17 00:00:00 2001 From: Spoked Date: Sat, 23 Nov 2024 23:53:52 -0500 Subject: [PATCH 02/12] refactor: downloaders refactor. wip --- src/program/media/__init__.py | 2 +- src/program/media/item.py | 24 +- src/program/services/downloaders/__init__.py | 191 +++---- src/program/services/downloaders/alldebrid.py | 490 +++++++++-------- src/program/services/downloaders/models.py | 105 ++++ .../services/downloaders/realdebrid.py | 118 +--- src/program/services/downloaders/shared.py | 207 ++----- src/program/services/downloaders/torbox.py | 513 ++---------------- src/program/services/scrapers/torrentio.py | 5 +- src/program/settings/models.py | 2 + src/program/types.py | 14 +- src/routers/secure/scrape.py | 12 +- 12 files changed, 579 insertions(+), 1104 deletions(-) create mode 100644 src/program/services/downloaders/models.py diff --git a/src/program/media/__init__.py b/src/program/media/__init__.py index 1bf3c795..271ab569 100644 --- a/src/program/media/__init__.py +++ b/src/program/media/__init__.py @@ -1,2 +1,2 @@ -from .item import Episode, MediaItem, Movie, Season, Show, ShowMediaType, MovieMediaType, MediaType # noqa +from .item import Episode, MediaItem, Movie, Season, Show # noqa from .state import States # noqa diff --git a/src/program/media/item.py b/src/program/media/item.py index 5b081963..178a64f6 100644 --- a/src/program/media/item.py +++ b/src/program/media/item.py @@ -18,22 +18,6 @@ from ..db.db_functions import blacklist_stream, reset_streams from .stream import Stream -class ShowMediaType(Enum): - """Show media types""" - Show = "show" - Season = "season" - Episode = "episode" - -class MovieMediaType(Enum): - """Media types""" - Movie = "movie" - -class MediaType(Enum): - """Combined media types""" - Show = ShowMediaType.Show.value - Season = ShowMediaType.Season.value - Episode = ShowMediaType.Episode.value - Movie = MovieMediaType.Movie.value class MediaItem(db.Model): """MediaItem class""" @@ -426,7 +410,7 @@ def copy(self, other): return self def __init__(self, item): - self.type = MovieMediaType.Movie.value + self.type = "movie" self.file = item.get("file", None) super().__init__(item) @@ -448,7 +432,7 @@ class Show(MediaItem): } def __init__(self, item): - self.type = ShowMediaType.Show.value + self.type = "show" self.locations = 
item.get("locations", []) self.seasons: list[Season] = item.get("seasons", []) self.propagate_attributes_to_childs() @@ -563,7 +547,7 @@ def store_state(self, given_state: States = None) -> None: super().store_state(given_state) def __init__(self, item): - self.type = ShowMediaType.Season.value + self.type = "season" self.number = item.get("number", None) self.episodes: list[Episode] = item.get("episodes", []) super().__init__(item) @@ -662,7 +646,7 @@ class Episode(MediaItem): } def __init__(self, item): - self.type = ShowMediaType.Episode.value + self.type = "episode" self.number = item.get("number", None) self.file = item.get("file", None) super().__init__(item) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index c47479f6..ae1d07bc 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -1,41 +1,32 @@ +from typing import List, Union from loguru import logger -from program.media.item import MediaItem, MovieMediaType, ShowMediaType +from program.media.item import MediaItem, Show, Season, Episode, Movie from program.media.state import States from program.media.stream import Stream from program.settings.manager import settings_manager -from program.services.downloaders.shared import filesize_is_acceptable, get_invalid_filesize_log_string +from program.services.downloaders.shared import parse_filename +from program.services.downloaders.models import ( + DebridFile, ParsedFileData, TorrentContainer, TorrentInfo, + DownloadedTorrent, NoMatchingFilesException, NotCachedException +) -from .alldebrid import AllDebridDownloader +# from .alldebrid import AllDebridDownloader from .realdebrid import RealDebridDownloader from .torbox import TorBoxDownloader -class InvalidFileSizeException(Exception): - pass - -class DownloadCachedStreamResult: - def __init__(self, container=None, torrent_id=None, info=None, info_hash=None): - self.container = container - self.torrent_id = torrent_id - self.info = info - self.info_hash = info_hash class Downloader: def __init__(self): self.key = "downloader" self.initialized = False - self.speed_mode = ( - settings_manager.settings.downloaders.prefer_speed_over_quality - ) + self.speed_mode = settings_manager.settings.downloaders.prefer_speed_over_quality self.services = { RealDebridDownloader: RealDebridDownloader(), - AllDebridDownloader: AllDebridDownloader(), - TorBoxDownloader: TorBoxDownloader() + TorBoxDownloader: TorBoxDownloader(), + # AllDebridDownloader: AllDebridDownloader() } - self.service = next( - (service for service in self.services.values() if service.initialized), None - ) - + self.service = next((service for service in self.services.values() if service.initialized), None) self.initialized = self.validate() def validate(self): @@ -47,124 +38,86 @@ def validate(self): return True def run(self, item: MediaItem): - logger.debug(f"Running downloader for {item.log_string}") - # for stream in item.streams: - # download_result = None - # try: - # download_result = self.download_cached_stream(item, stream) - # if download_result: - # self.validate_filesize(item, download_result) - # if not self.update_item_attributes(item, download_result): - # raise Exception("No matching files found!") - # break - # except Exception as e: - # if download_result and download_result.torrent_id: - # self.service.delete_torrent(download_result.torrent_id) - # logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") - # item.blacklist_stream(stream) - - # 
Chunk streams into groups of 10
         chunk_size = 10
         for i in range(0, len(item.streams), chunk_size):
-            logger.debug(f"Processing chunk {i} to {i + chunk_size}")
-            chunk = item.streams[i:i + chunk_size]
-            instant_availability = self.get_instant_availability([stream.infohash for stream in chunk])
-            # Filter out streams that aren't cached
-            available_streams = [stream for stream in chunk if instant_availability.get(stream.infohash, None)]
-            if not available_streams:
-                continue
-            for stream in available_streams:
+            logger.debug(f"Processing chunk {i} to {i + chunk_size} of {len(item.streams)} for {item.log_string}")
+            chunk: List[Stream] = item.streams[i:i + chunk_size]
+            response: List[TorrentContainer] = self.get_instant_availability([stream.infohash for stream in chunk], item.type)
+            for container in response:
+                stream: Stream = next((s for s in chunk if s.infohash == container.infohash), None)
                 download_result = None
                 try:
-                    download_result = self.download_cached_stream(item, stream, instant_availability[stream.infohash])
+                    if not container.cached:
+                        raise NotCachedException("Not cached!")
+                    download_result: DownloadedTorrent = self.download_cached_stream(stream, container)
                     if download_result:
-                        self.validate_filesize(item, download_result)
+                        logger.log("DEBRID", f"Downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]")
                         if not self.update_item_attributes(item, download_result):
-                            raise Exception("No matching files found!")
+                            raise NoMatchingFilesException("No matching files found!")
                         break
                 except Exception as e:
+                    logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}")
                     if download_result and download_result.torrent_id:
                         self.service.delete_torrent(download_result.torrent_id)
-                    logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}")
                     item.blacklist_stream(stream)
         yield item
 
+    def download_cached_stream(self, stream: Stream, container: TorrentContainer) -> DownloadedTorrent:
+        """Download a cached stream"""
+        torrent_id: str = self.add_torrent(stream.infohash)
+        info: TorrentInfo = self.get_torrent_info(torrent_id)
+        self.select_files(torrent_id, container)
+        return DownloadedTorrent(id=torrent_id, info=info, infohash=container.infohash, container=container)
 
-    def download_cached_stream(self, item: MediaItem, stream: Stream, cached_containers: list[dict]) -> DownloadCachedStreamResult:
-        if not cached_containers:
-            raise Exception("Not cached!")
-        the_container = cached_containers[0]
-        torrent_id = self.add_torrent(stream.infohash)
-        info = self.get_torrent_info(torrent_id)
-        self.select_files(torrent_id, the_container.keys())
-        logger.log("DEBRID", f"Downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]")
-        return DownloadCachedStreamResult(the_container, torrent_id, info, stream.infohash)
 
+    def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool:
+        """Update the item attributes with the downloaded files and active stream"""
+        if not all([download_result.infohash, download_result.info.id, download_result.info.filename]):
+            return False
+        found = False
+        container: List[DebridFile] = download_result.container.files
+        for file in container:
+            file_data: ParsedFileData = parse_filename(file.filename)
+            if item.type == "movie" and file_data.item_type == "movie":
+                self._update_attributes(item, file, download_result)
+                found = True
+                break
+            elif item.type in ("show", "season", "episode"):
+                if not (file_data.season and
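# parse_filename (added to shared.py later in this series) wraps RTN's parse().
# Assuming RTN resolves names the way that helper expects, a release name such
# as "Show.S02E05.1080p.mkv" should yield roughly:
#
#     data = parse_filename("Show.S02E05.1080p.mkv")
#     # data.item_type == "show", data.season == 2, data.episodes == [5]
#
# so this guard skips any file RTN could not pin to a season/episode pair.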
file_data.episodes): + continue + show: Show = item if item.type == "show" else (item.parent if item.type == "season" else item.parent.parent) + season: Season = next((season for season in show.seasons if season.number == file_data.season), None) + for file_episode in file_data.episodes: + episode: Episode = next((episode for episode in season.episodes if episode.number == file_episode), None) + if episode and episode.state not in [States.Completed, States.Symlinked, States.Downloaded]: + self._update_attributes(episode, file, download_result) + found = True + return found - def get_instant_availability(self, infohashes: list[str]) -> dict[str, list[dict]]: - return self.service.get_instant_availability(infohashes) + def _update_attributes(self, item: Union[Movie, Episode], debrid_file: DebridFile, download_result: DownloadedTorrent) -> None: + """Update the item attributes with the downloaded files and active stream""" + item.file = debrid_file.filename + item.folder = download_result.info.filename + item.alternative_folder = download_result.info.alternative_filename + item.active_stream = {"infohash": download_result.infohash, "id": download_result.info.id} - def add_torrent(self, infohash: str) -> int: + def get_instant_availability(self, infohashes: list[str], item_type: str) -> List[TorrentContainer]: + """Check if the torrent is cached""" + return self.service.get_instant_availability(infohashes, item_type) + + def add_torrent(self, infohash: str) -> str: + """Add a torrent by infohash""" return self.service.add_torrent(infohash) - def get_torrent_info(self, torrent_id: int): + def get_torrent_info(self, torrent_id: int) -> TorrentInfo: + """Get information about a torrent""" return self.service.get_torrent_info(torrent_id) - def select_files(self, torrent_id, container): + def select_files(self, torrent_id: int, container: list[str]) -> None: + """Select files from a torrent""" self.service.select_files(torrent_id, container) - def delete_torrent(self, torrent_id): + def delete_torrent(self, torrent_id: int) -> None: + """Delete a torrent""" self.service.delete_torrent(torrent_id) - - def update_item_attributes(self, item: MediaItem, download_result: DownloadCachedStreamResult) -> bool: - """Update the item attributes with the downloaded files and active stream""" - found = False - item = item - info_hash = download_result.info.get("hash", None) - id = download_result.info.get("id", None) - original_filename = download_result.info.get("original_filename", None) - filename = download_result.info.get("filename", None) - if not info_hash or not id or not original_filename or not filename: - return False - container = download_result.container - for file in container.values(): - if item.type == MovieMediaType.Movie.value and self.service.file_finder.container_file_matches_movie(file): - item.file = file[self.service.file_finder.filename_attr] - item.folder = filename - item.alternative_folder = original_filename - item.active_stream = {"infohash": info_hash, "id": id} - found = True - break - if item.type in (ShowMediaType.Show.value, ShowMediaType.Season.value, ShowMediaType.Episode.value): - show = item - if item.type == ShowMediaType.Season.value: - show = item.parent - elif item.type == ShowMediaType.Episode.value: - show = item.parent.parent - file_season, file_episodes = self.service.file_finder.container_file_matches_episode(file) - if file_season and file_episodes: - season = next((season for season in show.seasons if season.number == file_season), None) - for file_episode in 
file_episodes: - episode = next((episode for episode in season.episodes if episode.number == file_episode), None) - if episode and episode.state not in [States.Completed, States.Symlinked, States.Downloaded]: - episode.file = file[self.service.file_finder.filename_attr] - episode.folder = filename - episode.alternative_folder = original_filename - episode.active_stream = {"infohash": info_hash, "id": id} - # We have to make sure the episode is correct if item is an episode - if item.type != ShowMediaType.Episode.value or (item.type == ShowMediaType.Episode.value and episode.number == item.number): - found = True - return found - - def validate_filesize(self, item: MediaItem, download_result: DownloadCachedStreamResult): - for file in download_result.container.values(): - item_media_type = self._get_item_media_type(item) - if not filesize_is_acceptable(file[self.service.file_finder.filesize_attr], item_media_type): - - raise InvalidFileSizeException(f"File '{file[self.service.file_finder.filename_attr]}' is invalid: {get_invalid_filesize_log_string(file[self.service.file_finder.filesize_attr], item_media_type)}") - logger.debug(f"All files for {download_result.info_hash} are of an acceptable size") - - @staticmethod - def _get_item_media_type(item): - if item.type in (media_type.value for media_type in ShowMediaType): - return ShowMediaType.Show.value - return MovieMediaType.Movie.value \ No newline at end of file diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py index b944ee97..ae5da5d8 100644 --- a/src/program/services/downloaders/alldebrid.py +++ b/src/program/services/downloaders/alldebrid.py @@ -1,246 +1,244 @@ -from datetime import datetime -from typing import Dict, Iterator, List, Optional, Tuple - -from loguru import logger -from requests import Session -from requests.exceptions import ConnectTimeout - -from program.settings.manager import settings_manager -from program.utils.request import ( - BaseRequestHandler, - BaseRequestParameters, - HttpMethod, - ResponseType, - create_service_session, - get_rate_limit_params, -) - -from .shared import VIDEO_EXTENSIONS, DownloaderBase, FileFinder, premium_days_left - - -class AllDebridError(Exception): - """Base exception for AllDebrid related errors""" - -class AllDebridBaseRequestParameters(BaseRequestParameters): - """AllDebrid base request parameters""" - agent: Optional[str] = None - -class AllDebridRequestHandler(BaseRequestHandler): - def __init__(self, session: Session, base_url: str, base_params: AllDebridBaseRequestParameters, request_logging: bool = False): - super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, base_params=base_params, custom_exception=AllDebridError, request_logging=request_logging) - - def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> dict: - response = super()._request(method, endpoint, **kwargs) - if not response.is_ok or not response.data or "data" not in response.data: - raise AllDebridError("Invalid response from AllDebrid") - return response.data["data"] - -class AllDebridAPI: - """Handles AllDebrid API communication""" - BASE_URL = "https://api.alldebrid.com/v4" - AGENT = "Riven" - - def __init__(self, api_key: str, proxy_url: Optional[str] = None): - self.api_key = api_key - rate_limit_params = get_rate_limit_params(per_minute=600, per_second=12) - self.session = create_service_session(rate_limit_params=rate_limit_params) - self.session.headers.update({ - "Authorization": f"Bearer {api_key}" - }) - if 
proxy_url: - self.session.proxies = {"http": proxy_url, "https": proxy_url} - base_params = AllDebridBaseRequestParameters() - base_params.agent = self.AGENT - self.request_handler = AllDebridRequestHandler(self.session, self.BASE_URL, base_params) - - -class AllDebridDownloader(DownloaderBase): - """Main AllDebrid downloader class implementing DownloaderBase""" - - def __init__(self): - self.key = "alldebrid" - self.settings = settings_manager.settings.downloaders.all_debrid - self.api = None - self.file_finder = None - self.initialized = self.validate() - - def validate(self) -> bool: - """ - Validate AllDebrid settings and premium status - Required by DownloaderBase - """ - if not self._validate_settings(): - return False - - self.api = AllDebridAPI( - api_key=self.settings.api_key, - proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None - ) - - if not self._validate_premium(): - return False - - self.file_finder = FileFinder("filename", "filesize") - logger.success("AllDebrid initialized!") - return True - - def _validate_settings(self) -> bool: - """Validate configuration settings""" - if not self.settings.enabled: - return False - if not self.settings.api_key: - logger.warning("AllDebrid API key is not set") - return False - if self.settings.proxy_enabled and not self.settings.proxy_url: - logger.error("Proxy is enabled but no proxy URL is provided") - return False - return True - - def _validate_premium(self) -> bool: - """Validate premium status""" - try: - user_info = self.api.request_handler.execute(HttpMethod.GET, "user") - user = user_info.get("user", {}) - - if not user.get("isPremium", False): - logger.error("Premium membership required") - return False - - expiration = datetime.utcfromtimestamp(user.get("premiumUntil", 0)) - logger.log("DEBRID", premium_days_left(expiration)) - return True - - except ConnectTimeout: - logger.error("Connection to AllDebrid timed out") - except Exception as e: - logger.error(f"Failed to validate premium status: {e}") - return False - - def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: - """ - Get instant availability for multiple infohashes - Required by DownloaderBase - """ - if not self.initialized: - logger.error("Downloader not properly initialized") - return {} - - try: - params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)} - response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", **params) - magnets = response.get("magnets", []) - - availability = {} - for magnet in magnets: - if not isinstance(magnet, dict) or "files" not in magnet: - continue - - files = magnet.get("files", []) - valid_files = self._process_files(files) - - if valid_files: - availability[magnet["hash"]] = [valid_files] - - return availability - - except Exception as e: - logger.error(f"Failed to get instant availability: {e}") - return {} - - def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]: - """Walks nested files structure and yields filename, size pairs""" - dirs = [] - for file in files: - try: - size = int(file.get("s", "")) - yield file.get("n", "UNKNOWN"), size - except ValueError: - dirs.append(file) - - for directory in dirs: - yield from self._walk_files(directory.get("e", [])) - - def _process_files(self, files: List[dict]) -> Dict[str, dict]: - """Process and filter valid video files""" - result = {} - for i, (name, size) in enumerate(self._walk_files(files)): - if ( - any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS) - and "sample" 
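# _walk_files flattens AllDebrid's nested listing, where (judging from the
# accessors used here) a file looks like {"n": name, "s": size} and a folder
# like {"n": name, "e": [children]}. For example:
#
#     files = [{"n": "Season 1", "e": [{"n": "e01.mkv", "s": 123}]}]
#     list(_walk_files(files))  # -> [("e01.mkv", 123)]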
not in name.lower() - ): - result[str(i)] = {"filename": name, "filesize": size} - return result - - def add_torrent(self, infohash: str) -> str: - """ - Add a torrent by infohash - Required by DownloaderBase - """ - if not self.initialized: - raise AllDebridError("Downloader not properly initialized") - - try: - response = self.api.request_handler.execute( - HttpMethod.GET, - "magnet/upload", - **{"magnets[]": infohash} - ) - magnet_info = response.get("magnets", [])[0] - torrent_id = magnet_info.get("id") - - if not torrent_id: - raise AllDebridError("No torrent ID in response") - - return str(torrent_id) - - except Exception as e: - logger.error(f"Failed to add torrent {infohash}: {e}") - raise - - def select_files(self, torrent_id: str, files: List[str]): - """ - Select files from a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise AllDebridError("Downloader not properly initialized") - - try: - # AllDebrid doesn't have a separate file selection endpoint - # All files are automatically selected when adding the torrent - pass - except Exception as e: - logger.error(f"Failed to select files for torrent {torrent_id}: {e}") - raise - - def get_torrent_info(self, torrent_id: str) -> dict: - """ - Get information about a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise AllDebridError("Downloader not properly initialized") - - try: - response = self.api.request_handler.execute(HttpMethod.GET, "magnet/status", id=torrent_id) - info = response.get("magnets", {}) - if "filename" not in info: - raise AllDebridError("Invalid torrent info response") - return info - except Exception as e: - logger.error(f"Failed to get torrent info for {torrent_id}: {e}") - raise - - def delete_torrent(self, torrent_id: str): - """ - Delete a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise AllDebridError("Downloader not properly initialized") - - try: - self.api.request_handler.execute(HttpMethod.GET, "magnet/delete", id=torrent_id) - except Exception as e: - logger.error(f"Failed to delete torrent {torrent_id}: {e}") - raise \ No newline at end of file +# from datetime import datetime +# from typing import Dict, Iterator, List, Optional, Tuple + +# from loguru import logger +# from requests import Session +# from requests.exceptions import ConnectTimeout + +# from program.settings.manager import settings_manager +# from program.utils.request import ( +# BaseRequestHandler, +# BaseRequestParameters, +# HttpMethod, +# ResponseType, +# create_service_session, +# get_rate_limit_params, +# ) + +# from .shared import DownloaderBase, premium_days_left + + +# class AllDebridError(Exception): +# """Base exception for AllDebrid related errors""" + +# class AllDebridBaseRequestParameters(BaseRequestParameters): +# """AllDebrid base request parameters""" +# agent: Optional[str] = None + +# class AllDebridRequestHandler(BaseRequestHandler): +# def __init__(self, session: Session, base_url: str, base_params: AllDebridBaseRequestParameters, request_logging: bool = False): +# super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, base_params=base_params, custom_exception=AllDebridError, request_logging=request_logging) + +# def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> dict: +# response = super()._request(method, endpoint, **kwargs) +# if not response.is_ok or not response.data or "data" not in response.data: +# raise AllDebridError("Invalid response from AllDebrid") +# return response.data["data"] + +# 
class AllDebridAPI: +# """Handles AllDebrid API communication""" +# BASE_URL = "https://api.alldebrid.com/v4" +# AGENT = "Riven" + +# def __init__(self, api_key: str, proxy_url: Optional[str] = None): +# self.api_key = api_key +# rate_limit_params = get_rate_limit_params(per_minute=600, per_second=12) +# self.session = create_service_session(rate_limit_params=rate_limit_params) +# self.session.headers.update({ +# "Authorization": f"Bearer {api_key}" +# }) +# if proxy_url: +# self.session.proxies = {"http": proxy_url, "https": proxy_url} +# base_params = AllDebridBaseRequestParameters() +# base_params.agent = self.AGENT +# self.request_handler = AllDebridRequestHandler(self.session, self.BASE_URL, base_params) + + +# class AllDebridDownloader(DownloaderBase): +# """Main AllDebrid downloader class implementing DownloaderBase""" + +# def __init__(self): +# self.key = "alldebrid" +# self.settings = settings_manager.settings.downloaders.all_debrid +# self.api = None +# self.initialized = self.validate() + +# def validate(self) -> bool: +# """ +# Validate AllDebrid settings and premium status +# Required by DownloaderBase +# """ +# if not self._validate_settings(): +# return False + +# self.api = AllDebridAPI( +# api_key=self.settings.api_key, +# proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None +# ) + +# if not self._validate_premium(): +# return False + +# logger.success("AllDebrid initialized!") +# return True + +# def _validate_settings(self) -> bool: +# """Validate configuration settings""" +# if not self.settings.enabled: +# return False +# if not self.settings.api_key: +# logger.warning("AllDebrid API key is not set") +# return False +# if self.settings.proxy_enabled and not self.settings.proxy_url: +# logger.error("Proxy is enabled but no proxy URL is provided") +# return False +# return True + +# def _validate_premium(self) -> bool: +# """Validate premium status""" +# try: +# user_info = self.api.request_handler.execute(HttpMethod.GET, "user") +# user = user_info.get("user", {}) + +# if not user.get("isPremium", False): +# logger.error("Premium membership required") +# return False + +# expiration = datetime.utcfromtimestamp(user.get("premiumUntil", 0)) +# logger.log("DEBRID", premium_days_left(expiration)) +# return True + +# except ConnectTimeout: +# logger.error("Connection to AllDebrid timed out") +# except Exception as e: +# logger.error(f"Failed to validate premium status: {e}") +# return False + +# def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: +# """ +# Get instant availability for multiple infohashes +# Required by DownloaderBase +# """ +# if not self.initialized: +# logger.error("Downloader not properly initialized") +# return {} + +# try: +# params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)} +# response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", **params) +# magnets = response.get("magnets", []) + +# availability = {} +# for magnet in magnets: +# if not isinstance(magnet, dict) or "files" not in magnet: +# continue + +# files = magnet.get("files", []) +# valid_files = self._process_files(files) + +# if valid_files: +# availability[magnet["hash"]] = [valid_files] + +# return availability + +# except Exception as e: +# logger.error(f"Failed to get instant availability: {e}") +# return {} + +# def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]: +# """Walks nested files structure and yields filename, size pairs""" +# dirs = [] +# for file in files: +# 
try: +# size = int(file.get("s", "")) +# yield file.get("n", "UNKNOWN"), size +# except ValueError: +# dirs.append(file) + +# for directory in dirs: +# yield from self._walk_files(directory.get("e", [])) + +# def _process_files(self, files: List[dict]) -> Dict[str, dict]: +# """Process and filter valid video files""" +# result = {} +# for i, (name, size) in enumerate(self._walk_files(files)): +# if ( +# any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS) +# and "sample" not in name.lower() +# ): +# result[str(i)] = {"filename": name, "filesize": size} +# return result + +# def add_torrent(self, infohash: str) -> str: +# """ +# Add a torrent by infohash +# Required by DownloaderBase +# """ +# if not self.initialized: +# raise AllDebridError("Downloader not properly initialized") + +# try: +# response = self.api.request_handler.execute( +# HttpMethod.GET, +# "magnet/upload", +# **{"magnets[]": infohash} +# ) +# magnet_info = response.get("magnets", [])[0] +# torrent_id = magnet_info.get("id") + +# if not torrent_id: +# raise AllDebridError("No torrent ID in response") + +# return str(torrent_id) + +# except Exception as e: +# logger.error(f"Failed to add torrent {infohash}: {e}") +# raise + +# def select_files(self, torrent_id: str, files: List[str]): +# """ +# Select files from a torrent +# Required by DownloaderBase +# """ +# if not self.initialized: +# raise AllDebridError("Downloader not properly initialized") + +# try: +# # AllDebrid doesn't have a separate file selection endpoint +# # All files are automatically selected when adding the torrent +# pass +# except Exception as e: +# logger.error(f"Failed to select files for torrent {torrent_id}: {e}") +# raise + +# def get_torrent_info(self, torrent_id: str) -> dict: +# """ +# Get information about a torrent +# Required by DownloaderBase +# """ +# if not self.initialized: +# raise AllDebridError("Downloader not properly initialized") + +# try: +# response = self.api.request_handler.execute(HttpMethod.GET, "magnet/status", id=torrent_id) +# info = response.get("magnets", {}) +# if "filename" not in info: +# raise AllDebridError("Invalid torrent info response") +# return info +# except Exception as e: +# logger.error(f"Failed to get torrent info for {torrent_id}: {e}") +# raise + +# def delete_torrent(self, torrent_id: str): +# """ +# Delete a torrent +# Required by DownloaderBase +# """ +# if not self.initialized: +# raise AllDebridError("Downloader not properly initialized") + +# try: +# self.api.request_handler.execute(HttpMethod.GET, "magnet/delete", id=torrent_id) +# except Exception as e: +# logger.error(f"Failed to delete torrent {torrent_id}: {e}") +# raise diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py new file mode 100644 index 00000000..9998064a --- /dev/null +++ b/src/program/services/downloaders/models.py @@ -0,0 +1,105 @@ +from datetime import datetime +from typing import List, Literal, Optional +from loguru import logger +from pydantic import BaseModel, Field +from program.settings.manager import settings_manager + + +DEFAULT_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi"] +ALLOWED_VIDEO_EXTENSIONS = [ + "mp4", "mkv", "avi", "mov", "wmv", "flv", + "m4v", "webm", "mpg","mpeg", "m2ts", "ts", +] + +VIDEO_EXTENSIONS: list[str] = settings_manager.settings.downloaders.video_extensions or DEFAULT_VIDEO_EXTENSIONS +VIDEO_EXTENSIONS = [ext for ext in VIDEO_EXTENSIONS if ext in ALLOWED_VIDEO_EXTENSIONS] +if not VIDEO_EXTENSIONS: + VIDEO_EXTENSIONS = DEFAULT_VIDEO_EXTENSIONS + 
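# A worked example of the constraint tuples defined below, assuming a config
# of movie_filesize_mb_min=700 and movie_filesize_mb_max=-1 (-1 meaning "no
# limit"): the -1 sentinel collapses to 0 for a minimum and to infinity for a
# maximum, so:
#
#     FILESIZE_MOVIE_CONSTRAINT == (700, float("inf"))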
+movie_min_filesize: int = settings_manager.settings.downloaders.movie_filesize_mb_min
+movie_max_filesize: int = settings_manager.settings.downloaders.movie_filesize_mb_max
+episode_min_filesize: int = settings_manager.settings.downloaders.episode_filesize_mb_min
+episode_max_filesize: int = settings_manager.settings.downloaders.episode_filesize_mb_max
+
+# constraints for filesizes, follows the format tuple(min, max)
+FILESIZE_MOVIE_CONSTRAINT: tuple[int, int] = (
+    movie_min_filesize if movie_min_filesize >= 0 else 0,
+    movie_max_filesize if movie_max_filesize >= 0 else float("inf")
+)
+FILESIZE_EPISODE_CONSTRAINT: tuple[int, int] = (
+    episode_min_filesize if episode_min_filesize >= 0 else 0,
+    episode_max_filesize if episode_max_filesize >= 0 else float("inf")
+)
+
+
+class NotCachedException(Exception):
+    """Exception raised for torrents that are not cached"""
+
+class NoMatchingFilesException(Exception):
+    """Exception raised for torrents that do not match the expected files"""
+
+
+class DebridFile(BaseModel):
+    """Represents a file from a debrid service"""
+    filename: Optional[str] = None
+    filesize: Optional[int] = None
+
+    @classmethod
+    def create(cls, filename: str, filesize: int, filetype: str) -> Optional["DebridFile"]:
+        """Factory method to validate and create a DebridFile"""
+        if not any(filename.endswith(ext) for ext in VIDEO_EXTENSIONS) or "sample" in filename.lower():
+            return None
+        if filetype == "movie":
+            if not (FILESIZE_MOVIE_CONSTRAINT[0] <= filesize <= FILESIZE_MOVIE_CONSTRAINT[1]):
+                return None
+        elif filetype == "episode":
+            if not (FILESIZE_EPISODE_CONSTRAINT[0] <= filesize <= FILESIZE_EPISODE_CONSTRAINT[1]):
+                return None
+        return cls(filename=filename, filesize=filesize)
+
+
+class ParsedFileData(BaseModel):
+    """Represents a parsed file from a filename"""
+    item_type: Literal["movie", "show"]
+    season: Optional[int] = Field(default=None)
+    episodes: list[int] = Field(default_factory=list)
+
+
+class TorrentContainer(BaseModel):
+    """Represents a collection of files from an infohash from a debrid service"""
+    infohash: str
+    files: List[DebridFile] = Field(default_factory=list)
+
+    @property
+    def cached(self) -> bool:
+        """Check if the torrent is cached"""
+        return len(self.files) > 0
+
+
+class TorrentInfo(BaseModel):
+    """Torrent information from a debrid service"""
+    id: str
+    infohash: str = Field(default=None)
+    filename: str = Field(default=None)
+    status: str = Field(default=None)
+    progress: float = Field(default=None)
+    bytes: int = Field(default=None)
+    speed: int = Field(default=None)
+    seeders: int = Field(default=None)
+    created_at: datetime = Field(default=None)
+    expires_at: datetime = Field(default=None)
+    completed_at: datetime = Field(default=None)
+    alternative_filename: str = Field(default=None)  # Real-Debrid only
+
+    @property
+    def size_mb(self) -> float:
+        """Convert bytes to megabytes"""
+        return self.bytes / 1_000_000
+
+
+class DownloadedTorrent(BaseModel):
+    """Represents the result of a download operation"""
+    id: str
+    infohash: str
+    container: TorrentContainer
+    info: TorrentInfo
diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py
index 83721348..469dbda1 100644
--- a/src/program/services/downloaders/realdebrid.py
+++ b/src/program/services/downloaders/realdebrid.py
@@ -1,7 +1,6 @@
-import time
 from datetime import datetime
 from enum import Enum
-from typing import Dict, List, Optional, Union
+from typing import List, Optional, Union
 
 from loguru import logger
 from
pydantic import BaseModel @@ -15,8 +14,9 @@ create_service_session, get_rate_limit_params, ) +from program.services.downloaders.models import TorrentContainer, TorrentInfo -from .shared import VIDEO_EXTENSIONS, DownloaderBase, FileFinder, premium_days_left +from .shared import DownloaderBase, premium_days_left class RDTorrentStatus(str, Enum): @@ -75,14 +75,11 @@ def __init__(self, api_key: str, proxy_url: Optional[str] = None): class RealDebridDownloader(DownloaderBase): """Main Real-Debrid downloader class implementing DownloaderBase""" - MAX_RETRIES = 3 - RETRY_DELAY = 1.0 def __init__(self): self.key = "realdebrid" self.settings = settings_manager.settings.downloaders.real_debrid self.api = None - self.file_finder = None self.initialized = self.validate() def validate(self) -> bool: @@ -97,7 +94,6 @@ def validate(self) -> bool: api_key=self.settings.api_key, proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None ) - self.file_finder = FileFinder("filename", "filesize") return self._validate_premium() @@ -130,73 +126,13 @@ def _validate_premium(self) -> bool: logger.error(f"Failed to validate premium status: {e}") return False - def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: - """ - Get instant availability for multiple infohashes with retry logic - Required by DownloaderBase - """ - - if len(infohashes) == 0: - return {} - - for attempt in range(self.MAX_RETRIES): - try: - response = self.api.request_handler.execute( - HttpMethod.GET, - f"torrents/instantAvailability/{'/'.join(infohashes)}" - ) - - # Return early if response is not a dict - if not isinstance(response, dict): - return {} - - # Check for empty response - if all(isinstance(data, list) for data in response.values()): - logger.debug(f"Empty response received (attempt {attempt + 1}/{self.MAX_RETRIES})") - time.sleep(self.RETRY_DELAY) - continue - - return { - infohash: self._filter_valid_containers(data.get("rd", [])) - for infohash, data in response.items() - if isinstance(data, dict) and "rd" in data - } - - except Exception as e: - logger.debug(f"Failed to get instant availability (attempt {attempt + 1}/{self.MAX_RETRIES}): {e}") - if attempt < self.MAX_RETRIES - 1: - time.sleep(self.RETRY_DELAY) - continue - - logger.debug("All retry attempts failed for instant availability") - return {} - - def _filter_valid_containers(self, containers: List[dict]) -> List[dict]: - """Filter and sort valid video containers""" - valid_containers = [ - container for container in containers - if self._contains_valid_video_files(container) - ] - return sorted(valid_containers, key=len, reverse=True) - - def _contains_valid_video_files(self, container: dict) -> bool: - """Check if container has valid video files""" - return all( - any( - file["filename"].endswith(ext) and "sample" not in file["filename"].lower() - for ext in VIDEO_EXTENSIONS - ) - for file in container.values() - ) + def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: + """Get instant availability for multiple infohashes with retry logic""" + # Real-Debrid does not support instant availability anymore + return [] def add_torrent(self, infohash: str) -> str: - """ - Add a torrent by infohash - Required by DownloaderBase - """ - if not self.initialized: - raise RealDebridError("Downloader not properly initialized") - + """Add a torrent by infohash""" try: magnet = f"magnet:?xt=urn:btih:{infohash}" response = self.api.request_handler.execute( @@ -209,49 +145,31 @@ def 
add_torrent(self, infohash: str) -> str: logger.error(f"Failed to add torrent {infohash}: {e}") raise - def select_files(self, torrent_id: str, files: List[str]): - """ - Select files from a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise RealDebridError("Downloader not properly initialized") - + def select_files(self, torrent_id: str, files: TorrentContainer) -> None: + """Select files from a torrent""" try: self.api.request_handler.execute( HttpMethod.POST, f"torrents/selectFiles/{torrent_id}", - data={"files": ",".join(files)} + data={"files": "all"} ) except Exception as e: logger.error(f"Failed to select files for torrent {torrent_id}: {e}") raise - def get_torrent_info(self, torrent_id: str) -> dict: - """ - Get information about a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise RealDebridError("Downloader not properly initialized") - + def get_torrent_info(self, torrent_id: str) -> TorrentInfo: + """Get information about a torrent""" try: - return self.api.request_handler.execute(HttpMethod.GET, f"torrents/info/{torrent_id}") + data = self.api.request_handler.execute(HttpMethod.GET, f"torrents/info/{torrent_id}") + return TorrentInfo(**data) except Exception as e: logger.error(f"Failed to get torrent info for {torrent_id}: {e}") raise - def delete_torrent(self, torrent_id: str): - """ - Delete a torrent - Required by DownloaderBase - """ - - if not self.initialized: - raise RealDebridError("Downloader not properly initialized") - + def delete_torrent(self, torrent_id: str) -> None: + """Delete a torrent""" try: self.api.request_handler.execute(HttpMethod.DELETE, f"torrents/delete/{torrent_id}") except Exception as e: logger.error(f"Failed to delete torrent {torrent_id}: {e}") - raise \ No newline at end of file + raise diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py index cc3b2c74..b796cf9b 100644 --- a/src/program/services/downloaders/shared.py +++ b/src/program/services/downloaders/shared.py @@ -1,99 +1,90 @@ from abc import ABC, abstractmethod from datetime import datetime -from typing import Tuple - -from loguru import logger +from typing import List from RTN import parse -from program.media import MovieMediaType, ShowMediaType -from program.settings.manager import settings_manager - -DEFAULT_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi"] -ALLOWED_VIDEO_EXTENSIONS = [ - "mp4", - "mkv", - "avi", - "mov", - "wmv", - "flv", - "m4v", - "webm", - "mpg", - "mpeg", - "m2ts", - "ts", -] - -VIDEO_EXTENSIONS = ( - settings_manager.settings.downloaders.video_extensions or DEFAULT_VIDEO_EXTENSIONS -) -VIDEO_EXTENSIONS = [ext for ext in VIDEO_EXTENSIONS if ext in ALLOWED_VIDEO_EXTENSIONS] - -if not VIDEO_EXTENSIONS: - VIDEO_EXTENSIONS = DEFAULT_VIDEO_EXTENSIONS - -# Type aliases -InfoHash = str # A torrent hash -DebridTorrentId = ( - str # Identifier issued by the debrid service for a torrent in their cache -) +from program.services.downloaders.models import ParsedFileData, TorrentInfo, TorrentContainer class DownloaderBase(ABC): - """ - The abstract base class for all Downloader implementations. 
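# The rewrite below gives every DownloaderBase method a typed signature and a
# docstring. A minimal conforming stub, assuming the models imported above
# (TorrentContainer, TorrentInfo) and standing in for a real service:
#
#     class NullDownloader(DownloaderBase):
#         def validate(self) -> bool: return False
#         def get_instant_availability(self, infohashes): return []
#         def add_torrent(self, infohash): return "0"
#         def select_files(self, request): pass
#         def get_torrent_info(self, torrent_id): return TorrentInfo(id="0")
#         def delete_torrent(self, torrent_id): pass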
- """ + """The abstract base class for all Downloader implementations.""" @abstractmethod - def validate(): + def validate(self) -> bool: + """ + Validate the downloader configuration and premium status + + Returns: + ValidationResult: Contains validation status and any error messages + """ pass + @abstractmethod - def get_instant_availability(): + def get_instant_availability(self, infohashes: List[str]) -> List[TorrentContainer]: + """ + Get instant availability for multiple infohashes + + Args: + infohashes: List of torrent hashes to check + + Returns: + List[TorrentContainer]: Cached status and available files for each hash + """ pass @abstractmethod - def add_torrent(): + def add_torrent(self, infohash: str) -> str: + """ + Add a torrent and return its information + + Args: + infohash: The hash of the torrent to add + + Returns: + str: The ID of the added torrent + """ pass @abstractmethod - def select_files(): + def select_files(self, request: list[int]) -> None: + """ + Select which files to download from the torrent + + Args: + request: File selection details including torrent ID and file IDs + """ pass @abstractmethod - def get_torrent_info(): + def get_torrent_info(self, torrent_id: str) -> TorrentInfo: + """ + Get information about a specific torrent using its ID + + Args: + torrent_id: ID of the torrent to get info for + + Returns: + TorrentInfo: Current information about the torrent + """ pass @abstractmethod - def delete_torrent(): + def delete_torrent(self, torrent_id: str) -> None: + """ + Delete a torrent from the service + + Args: + torrent_id: ID of the torrent to delete + """ pass -class FileFinder: - """ - A class that helps you find files. - Attributes: - filename_attr (str): The name of the file attribute. - """ +def parse_filename(filename: str) -> ParsedFileData: + """Parse a filename into a ParsedFileData object""" + parsed_data = parse(filename) + season = parsed_data.seasons[0] if parsed_data.seasons else None + return ParsedFileData(item_type=parsed_data.type, season=season, episodes=parsed_data.episodes) - def __init__(self, name, size): - self.filename_attr = name - self.filesize_attr = size - - def container_file_matches_episode(self, file): - filename = file[self.filename_attr] - try: - parsed_data = parse(filename) - return parsed_data.seasons[0], parsed_data.episodes - except Exception: - return None, None - - def container_file_matches_movie(self, file): - filename = file[self.filename_attr] - try: - parsed_data = parse(filename) - return parsed_data.type == "movie" - except Exception: - return None def premium_days_left(expiration: datetime) -> str: """Convert an expiration date into a message showing days remaining on the user's premium account""" @@ -111,81 +102,3 @@ def premium_days_left(expiration: datetime) -> str: else: expiration_message = "Your account expires soon." 
return expiration_message - - -def hash_from_uri(magnet_uri: str) -> str: - if len(magnet_uri) == 40: - # Probably already a hash - return magnet_uri - start = magnet_uri.index("urn:btih:") + len("urn:btih:") - return magnet_uri[start : start + 40] - -min_movie_filesize = settings_manager.settings.downloaders.movie_filesize_mb_min -max_movie_filesize = settings_manager.settings.downloaders.movie_filesize_mb_max -min_episode_filesize = settings_manager.settings.downloaders.episode_filesize_mb_min -max_episode_filesize = settings_manager.settings.downloaders.episode_filesize_mb_max - -def _validate_filesize_setting(value: int, setting_name: str) -> bool: - """Validate a single filesize setting.""" - if not isinstance(value, int) or value < -1: - logger.error(f"{setting_name} is not valid. Got {value}, expected integer >= -1") - return False - return True - -def _validate_filesizes() -> bool: - """ - Validate all filesize settings from configuration. - Returns True if all settings are valid integers >= -1, False otherwise. - """ - settings = settings_manager.settings.downloaders - return all([ - _validate_filesize_setting(settings.movie_filesize_mb_min, "Movie filesize min"), - _validate_filesize_setting(settings.movie_filesize_mb_max, "Movie filesize max"), - _validate_filesize_setting(settings.episode_filesize_mb_min, "Episode filesize min"), - _validate_filesize_setting(settings.episode_filesize_mb_max, "Episode filesize max") - ]) - -are_filesizes_valid = _validate_filesizes() - -BYTES_PER_MB = 1_000_000 - -def _convert_to_bytes(size_mb: int) -> int: - """Convert size from megabytes to bytes.""" - return size_mb * BYTES_PER_MB - -def _get_size_limits(media_type: str) -> Tuple[int, int]: - """Get min and max size limits in MB for given media type.""" - settings = settings_manager.settings.downloaders - if media_type == MovieMediaType.Movie.value: - return (settings.movie_filesize_mb_min, settings.movie_filesize_mb_max) - return (settings.episode_filesize_mb_min, settings.episode_filesize_mb_max) - -def _validate_filesize(filesize: int, media_type: str) -> bool: - """ - Validate file size against configured limits. 
- - Args: - filesize: Size in bytes to validate - media_type: Type of media being validated - - Returns: - bool: True if size is within configured range - """ - if not are_filesizes_valid: - logger.error(f"Filesize settings are invalid, {media_type} file sizes will not be checked.") - return True - - min_mb, max_mb = _get_size_limits(media_type) - min_size = 0 if min_mb == -1 else _convert_to_bytes(min_mb) - max_size = float("inf") if max_mb == -1 else _convert_to_bytes(max_mb) - - return min_size <= filesize <= max_size - - -def filesize_is_acceptable(filesize: int, media_type: str) -> bool: - return _validate_filesize(filesize, media_type) - -def get_invalid_filesize_log_string(filesize: int, media_type: str) -> str: - min_mb, max_mb = _get_size_limits(media_type) - friendly_filesize = round(filesize / BYTES_PER_MB, 2) - return f"{friendly_filesize} MB is not within acceptable range of [{min_mb}MB] to [{max_mb}MB]" \ No newline at end of file diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py index 6cf12308..55016508 100644 --- a/src/program/services/downloaders/torbox.py +++ b/src/program/services/downloaders/torbox.py @@ -1,345 +1,7 @@ -# import contextlib -# from datetime import datetime -# from pathlib import Path -# from posixpath import splitext -# from typing import Generator - -# from requests import ConnectTimeout -# from RTN import parse -# from RTN.exceptions import GarbageTorrent - -# from program.db.db import db -# from program.db.db_functions import get_stream_count, load_streams_in_pages -# from program.media.item import MediaItem -# from program.media.state import States -# from program.media.stream import Stream -# from program.settings.manager import settings_manager -# from loguru import logger -# from program.utils.request import get, post - -# API_URL = "https://api.torbox.app/v1/api" -# WANTED_FORMATS = {".mkv", ".mp4", ".avi"} - - -# class TorBoxDownloader: -# """TorBox Downloader""" - -# def __init__(self): -# self.key = "torbox_downloader" -# self.settings = settings_manager.settings.downloaders.torbox -# self.api_key = self.settings.api_key -# self.base_url = "https://api.torbox.app/v1/api" -# self.headers = {"Authorization": f"Bearer {self.api_key}"} -# self.initialized = self.validate() -# if not self.initialized: -# return -# logger.success("TorBox Downloader initialized!") - -# def validate(self) -> bool: -# """Validate the TorBox Downloader as a service""" -# if not self.settings.enabled: -# return False -# if not self.settings.api_key: -# logger.error("Torbox API key is not set") -# try: -# response = get(f"{self.base_url}/user/me", headers=self.headers) -# if response.is_ok: -# user_info = response.data.data -# expiration = user_info.premium_expires_at -# expiration_date_time = datetime.fromisoformat(expiration) -# expiration_date_time.replace(tzinfo=None) -# delta = expiration_date_time - datetime.now().replace( -# tzinfo=expiration_date_time.tzinfo -# ) - -# if delta.days > 0: -# expiration_message = f"Your account expires in {delta.days} days." -# else: -# expiration_message = "Your account expires soon." 
- -# if user_info.plan == 0: -# logger.error("You are not a premium member.") -# return False -# else: -# logger.log("DEBRID", expiration_message) - -# return user_info.plan != 0 -# except ConnectTimeout: -# logger.error("Connection to Torbox timed out.") -# except Exception as e: -# logger.exception(f"Failed to validate Torbox settings: {e}") -# return False - -# def run(self, item: MediaItem) -> bool: -# """Download media item from torbox.app""" -# return_value = False -# stream_count = get_stream_count(item._id) -# processed_stream_hashes = set() # Track processed stream hashes -# stream_hashes = {} - -# number_of_rows_per_page = 5 -# total_pages = (stream_count // number_of_rows_per_page) + 1 - -# for page_number in range(total_pages): -# with db.Session() as session: -# for stream_id, infohash, stream in load_streams_in_pages( -# session, item._id, page_number, page_size=number_of_rows_per_page -# ): -# stream_hash_lower = infohash.lower() - -# if stream_hash_lower in processed_stream_hashes: -# continue - -# processed_stream_hashes.add(stream_hash_lower) -# stream_hashes[stream_hash_lower] = stream - -# cached_hashes = self.get_torrent_cached(list(stream_hashes.keys())) -# if cached_hashes: -# for cache in cached_hashes.values(): -# item.active_stream = cache -# if self.find_required_files(item, cache["files"]): -# logger.log( -# "DEBRID", -# f"Item is cached, proceeding with: {item.log_string}", -# ) -# item.set( -# "active_stream", -# { -# "hash": cache["hash"], -# "files": cache["files"], -# "id": None, -# }, -# ) -# self.download(item) -# return_value = True -# break -# else: -# stream = stream_hashes.get(cache["hash"].lower()) -# if stream: -# stream.blacklisted = True -# else: -# logger.log("DEBRID", f"Item is not cached: {item.log_string}") -# for stream in stream_hashes.values(): -# logger.log( -# "DEBUG", -# f"Blacklisting uncached hash ({stream.infohash}) for item: {item.log_string}", -# ) -# stream.blacklisted = True - -# return return_value - -# def get_cached_hashes(self, item: MediaItem, streams: list[str]) -> list[str]: -# """Check if the item is cached in torbox.app""" -# cached_hashes = self.get_torrent_cached(streams) -# return { -# stream: cached_hashes[stream]["files"] -# for stream in streams -# if stream in cached_hashes -# } - -# def get_cached_hashes( -# self, item: MediaItem, streams: list[str:Stream] -# ) -> list[str]: -# """Check if the item is cached in torbox.app""" -# cached_hashes = self.get_torrent_cached(streams) -# return { -# stream: cached_hashes[stream]["files"] -# for stream in streams -# if stream in cached_hashes -# } - -# def download_cached(self, item: MediaItem, stream: str) -> None: -# """Download the cached item from torbox.app""" -# cache = self.get_torrent_cached([stream])[stream] -# item.active_stream = cache -# self.download(item) - -# def find_required_files(self, item, container): - -# files = [ -# file -# for file in container -# if file -# and file["size"] > 10000 -# and splitext(file["name"].lower())[1] in WANTED_FORMATS -# ] - -# parsed_file = parse(file["name"]) - -# if item.type == "movie": -# for file in files: -# if parsed_file.type == "movie": -# return [file] -# if item.type == "show": -# # Create a dictionary to map seasons and episodes needed -# needed_episodes = {} -# acceptable_states = [ -# States.Indexed, -# States.Scraped, -# States.Unknown, -# States.Failed, -# ] - -# for season in item.seasons: -# if season.state in acceptable_states and season.is_released: -# needed_episode_numbers = { -# episode.number -# for 
episode in season.episodes -# if episode.state in acceptable_states and episode.is_released -# } -# if needed_episode_numbers: -# needed_episodes[season.number] = needed_episode_numbers -# if not needed_episodes: -# return False - -# # Iterate over each file to check if it matches -# # the season and episode within the show -# matched_files = [] -# for file in files: -# if not parsed_file.seasons or parsed_file.seasons == [0]: -# continue - -# # Check each season and episode to find a match -# for season_number, episodes in needed_episodes.items(): -# if season_number in parsed_file.season: -# for episode_number in list(episodes): -# if episode_number in parsed_file.episode: -# # Store the matched file for this episode -# matched_files.append(file) -# episodes.remove(episode_number) -# if not matched_files: -# return False - -# if all(len(episodes) == 0 for episodes in needed_episodes.values()): -# return matched_files -# if item.type == "season": -# needed_episodes = { -# episode.number: episode -# for episode in item.episodes -# if episode.state -# in [States.Indexed, States.Scraped, States.Unknown, States.Failed] -# } -# one_season = len(item.parent.seasons) == 1 - -# # Dictionary to hold the matched files for each episode -# matched_files = [] -# season_num = item.number - -# # Parse files once and assign to episodes -# for file in files: -# if not file or not file.get("name"): -# continue -# if not parsed_file.seasons or parsed_file.seasons == [ -# 0 -# ]: # skip specials -# continue -# # Check if the file's season matches the item's season or if there's only one season -# if season_num in parsed_file.seasons or one_season: -# for ep_num in parsed_file.episodes: -# if ep_num in needed_episodes: -# matched_files.append(file) -# if not matched_files: -# return False - -# # Check if all needed episodes are captured (or atleast half) -# if len(needed_episodes) == len(matched_files): -# return matched_files -# if item.type == "episode": -# for file in files: -# if not file or not file.get("name"): -# continue -# if ( -# item.number in parsed_file.episodes -# and item.parent.number in parsed_file.seasons -# ): -# return [file] - -# return [] - -# def download(self, item: MediaItem): -# # Check if the torrent already exists -# exists = False -# torrent_list = self.get_torrent_list() -# for torrent in torrent_list: -# if item.active_stream["hash"] == torrent["hash"]: -# id = torrent["id"] -# exists = True -# break - -# # If it doesnt, lets download it and refresh the torrent_list -# if not exists: -# id = self.add_torrent(item.active_stream["hash"]) -# torrent_list = self.get_torrent_list() - -# # Find the torrent, correct file and we gucci -# for torrent in torrent_list: -# if torrent["id"] == id: -# if item.type == "movie": -# file = self.find_required_files(item, item.active_stream["files"])[ -# 0 -# ] -# _file_path = Path(file["name"]) -# item.set("folder", _file_path.parent.name) -# item.set("alternative_folder", ".") -# item.set("file", _file_path.name) -# if item.type == "show": -# files = self.find_required_files(item, item.active_stream["files"]) -# for season in item.seasons: -# for episode in season.episodes: -# file = self.find_required_files(episode, files)[0] -# _file_path = Path(file["name"]) -# episode.set("folder", _file_path.parent.name) -# episode.set("alternative_folder", ".") -# episode.set("file", _file_path.name) -# if item.type == "season": -# files = self.find_required_files(item, item.active_stream["files"]) -# for episode in item.episodes: -# file = 
self.find_required_files(episode, files)[0] -# _file_path = Path(file["name"]) -# episode.set("folder", _file_path.parent.name) -# episode.set("alternative_folder", ".") -# episode.set("file", _file_path.name) -# if item.type == "episode": -# file = self.find_required_files(episode, files)[0] -# _file_path = Path(file["name"]) -# item.set("folder", _file_path.parent.name) -# item.set("alternative_folder", ".") -# item.set("file", _file_path.name) -# logger.log("DEBRID", f"Downloaded {item.log_string}") - -# def get_torrent_cached(self, hash_list): -# hash_string = ",".join(hash_list) -# response = get( -# f"{self.base_url}/torrents/checkcached?hash={hash_string}&list_files=True", -# headers=self.headers, -# response_type=dict, -# ) -# return response.data["data"] - -# def add_torrent(self, infohash) -> int: -# magnet_url = f"magnet:?xt=urn:btih:{infohash}&dn=&tr=" -# response = post( -# f"{self.base_url}/torrents/createtorrent", -# data={"magnet": magnet_url, "seed": 1, "allow_zip": False}, -# headers=self.headers, -# ) -# return response.data.data.torrent_id - -# def get_torrent_list(self) -> list: -# response = get( -# f"{self.base_url}/torrents/mylist?bypass_cache=true", -# headers=self.headers, -# response_type=dict, -# ) -# return response.data["data"] - import time from datetime import datetime -from enum import Enum -from typing import Dict, List, Optional, Union - +from typing import List, Optional, Union from loguru import logger -from pydantic import BaseModel from requests import Session from program.settings.manager import settings_manager @@ -350,38 +12,27 @@ create_service_session, get_rate_limit_params, ) +from program.services.downloaders.models import TorrentContainer, DebridFile, TorrentInfo -from .shared import VIDEO_EXTENSIONS, DownloaderBase, FileFinder, premium_days_left +from .shared import DownloaderBase, premium_days_left -class TBTorrentStatus(str, Enum): - """Real-Debrid torrent status enumeration""" - MAGNET_ERROR = "magnet_error" - MAGNET_CONVERSION = "magnet_conversion" - WAITING_FILES = "waiting_files_selection" - DOWNLOADING = "downloading" - DOWNLOADED = "downloaded" - ERROR = "error" - SEEDING = "seeding" - DEAD = "dead" - UPLOADING = "uploading" - COMPRESSING = "compressing" +# class TBTorrentStatus(str, Enum): +# """Torbox torrent status enumeration""" +# MAGNET_ERROR = "magnet_error" +# MAGNET_CONVERSION = "magnet_conversion" +# WAITING_FILES = "waiting_files_selection" +# DOWNLOADING = "downloading" +# DOWNLOADED = "downloaded" +# ERROR = "error" +# SEEDING = "seeding" +# DEAD = "dead" +# UPLOADING = "uploading" +# COMPRESSING = "compressing" -class TBTorrent(BaseModel): - """Real-Debrid torrent model""" - id: str - hash: str - filename: str - bytes: int - status: TBTorrentStatus - added: datetime - links: List[str] - ended: Optional[datetime] = None - speed: Optional[int] = None - seeders: Optional[int] = None class TorBoxError(Exception): - """Base exception for Real-Debrid related errors""" + """Base exception for TorBox related errors""" class TorBoxRequestHandler(BaseRequestHandler): def __init__(self, session: Session, base_url: str, request_logging: bool = False): @@ -417,7 +68,6 @@ def __init__(self): self.key = "torbox" self.settings = settings_manager.settings.downloaders.torbox self.api = None - self.file_finder = None self.initialized = self.validate() def validate(self) -> bool: @@ -430,9 +80,8 @@ def validate(self) -> bool: self.api = TorBoxAPI( api_key=self.settings.api_key, - # proxy_url=self.settings.proxy_url if 
self.settings.proxy_enabled else None + proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None ) - self.file_finder = FileFinder("short_name", "size") return self._validate_premium() @@ -443,9 +92,6 @@ def _validate_settings(self) -> bool: if not self.settings.api_key: logger.warning("TorBox API key is not set") return False - # if self.settings.proxy_enabled and not self.settings.proxy_url: - # logger.error("Proxy is enabled but no proxy URL is provided") - # return False return True def _validate_premium(self) -> bool: @@ -466,16 +112,9 @@ def _validate_premium(self) -> bool: logger.error(f"Failed to validate premium status: {e}") return False - # TODO - def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: - """ - Get instant availability for multiple infohashes with retry logic - Required by DownloaderBase - """ - - if len(infohashes) == 0: - return {} - + def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: + """Get instant availability for multiple infohashes with retry logic""" + results = [] for attempt in range(self.MAX_RETRIES): try: response = self.api.request_handler.execute( @@ -483,24 +122,22 @@ def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: f"torrents/checkcached?hash={','.join(infohashes)}&format=list&list_files=true" ) - data = response.get("data") - + data: list = response["data"] if not data: - return {} - - # Return early if data is not a dict - if not isinstance(data, list): - logger.warning(f"Invalid instant availability data from TorBox, expected list, got {type(data)}") - return {} - - return { - entry['hash']: [{i: file for i, file in enumerate(entry['files'])}] - #entry['hash']: [{"1": entry['files']}] - for entry in data - if self._contains_valid_video_files(entry['files']) - # if isinstance(entry, dict) - } - + return results + + for torrent in data: + files = [] + for file in torrent["files"]: + debrid_file = DebridFile.create(file["name"], file["size"], item_type) + if debrid_file: + files.append(debrid_file) + if files: + results.append(TorrentContainer( + infohash=torrent["hash"], + files=files + )) + return results except Exception as e: logger.debug(f"Failed to get instant availability (attempt {attempt + 1}/{self.MAX_RETRIES}): {e}") if attempt < self.MAX_RETRIES - 1: @@ -508,34 +145,10 @@ def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: continue logger.debug("All retry attempts failed for instant availability") - return {} - - # def _filter_valid_containers(self, containers: List[dict]) -> List[dict]: - # """Filter and sort valid video containers""" - # valid_containers = [ - # container for container in containers - # if self._contains_valid_video_files(container) - # ] - # return sorted(valid_containers, key=len, reverse=True) - - def _contains_valid_video_files(self, container: dict) -> bool: - """Check if container has valid video files""" - return all( - any( - file["name"].endswith(ext) and "sample" not in file["name"].lower() - for ext in VIDEO_EXTENSIONS - ) - for file in container - ) + return [] def add_torrent(self, infohash: str) -> str: - """ - Add a torrent by infohash - Required by DownloaderBase - """ - if not self.initialized: - raise TorBoxError("Downloader not properly initialized") - + """Add a torrent by infohash""" try: magnet = f"magnet:?xt=urn:btih:{infohash}" response = self.api.request_handler.execute( @@ -548,48 +161,22 @@ def add_torrent(self, infohash: str) -> str: 
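+ # Log and re-raise: Downloader.run() treats the failure as an invalid stream and blacklists it.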
logger.error(f"Failed to add torrent {infohash}: {e}") raise - # TODO - def select_files(self, torrent_id: str, files: List[str]): - """ - Select files from a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise TorBoxError("Downloader not properly initialized") - - # I think that's not required for TorBox - - # TODO - def get_torrent_info(self, torrent_id: str) -> dict: - """ - Get information about a torrent - Required by DownloaderBase - """ - if not self.initialized: - raise TorBoxError("Downloader not properly initialized") - - # Does TorBox have a method to get torrent info? - - # try: - # return self.api.request_handler.execute(HttpMethod.GET, f"torrents/torrentinfo/{torrent_id}")['data'] - # except Exception as e: - # logger.error(f"Failed to get torrent info for {torrent_id}: {e}") - # raise - - # TODO - def delete_torrent(self, torrent_id: str): - """ - Delete a torrent - Required by DownloaderBase - """ + def select_files(self, *args) -> None: + """Select files from a torrent""" + pass - if not self.initialized: - raise TorBoxError("Downloader not properly initialized") - - logger.debug(f"Deleting torrent {torrent_id}") + def get_torrent_info(self, torrent_id: str) -> TorrentInfo: + """Get information about a torrent""" + try: + return self.api.request_handler.execute(HttpMethod.GET, f"torrents/torrentinfo/{torrent_id}")['data'] + except Exception as e: + logger.error(f"Failed to get torrent info for {torrent_id}: {e}") + raise + def delete_torrent(self, torrent_id: str) -> None: + """Delete a torrent""" try: self.api.request_handler.execute(HttpMethod.POST, f"torrents/controltorrent", data={"torrent_id": torrent_id, "operation": "delete"}) except Exception as e: logger.error(f"Failed to delete torrent {torrent_id}: {e}") - raise \ No newline at end of file + raise diff --git a/src/program/services/scrapers/torrentio.py b/src/program/services/scrapers/torrentio.py index 317cedab..128c20c9 100644 --- a/src/program/services/scrapers/torrentio.py +++ b/src/program/services/scrapers/torrentio.py @@ -28,6 +28,7 @@ def __init__(self): rate_limit_params = get_rate_limit_params(max_calls=1, period=5) if self.settings.ratelimit else None session = create_service_session(rate_limit_params=rate_limit_params) self.request_handler = ScraperRequestHandler(session) + self.headers = {"User-Agent": "Mozilla/5.0"} self.initialized: bool = self.validate() if not self.initialized: return @@ -45,7 +46,7 @@ def validate(self) -> bool: return False try: url = f"{self.settings.url}/{self.settings.filter}/manifest.json" - response = self.request_handler.execute(HttpMethod.GET, url, timeout=10) + response = self.request_handler.execute(HttpMethod.GET, url, timeout=10, headers=self.headers) if response.is_ok: return True except Exception as e: @@ -73,7 +74,7 @@ def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]: if identifier: url += identifier - response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout) + response = self.request_handler.execute(HttpMethod.GET, f"{url}.json", timeout=self.timeout, headers=self.headers) if not response.is_ok or not hasattr(response.data, 'streams') or not response.data.streams: logger.log("NOT_FOUND", f"No streams found for {item.log_string}") return {} diff --git a/src/program/settings/models.py b/src/program/settings/models.py index d3a0c27c..f9726802 100644 --- a/src/program/settings/models.py +++ b/src/program/settings/models.py @@ -57,6 +57,8 @@ class AllDebridModel(Observable): class 
TorboxModel(Observable): enabled: bool = False api_key: str = "" + proxy_enabled: bool = False + proxy_url: str = "" class DownloadersModel(Observable): diff --git a/src/program/types.py b/src/program/types.py index de8d0806..d3fdcef1 100644 --- a/src/program/types.py +++ b/src/program/types.py @@ -10,7 +10,10 @@ PlexWatchlist, TraktContent, ) -from program.services.downloaders import AllDebridDownloader, RealDebridDownloader +from program.services.downloaders import ( + # AllDebridDownloader, + RealDebridDownloader +) # TorBoxDownloader, from program.services.libraries import SymlinkLibrary @@ -31,9 +34,12 @@ # Typehint classes Scraper = Union[Scraping, Torrentio, Knightcrawler, Mediafusion, Orionoid, Jackett, TorBoxScraper, Zilean, Comet] Content = Union[Overseerr, PlexWatchlist, Listrr, Mdblist, TraktContent] -Downloader = Union[RealDebridDownloader, - # TorBoxDownloader, - AllDebridDownloader] +Downloader = Union[ + RealDebridDownloader, + # TorBoxDownloader, + # AllDebridDownloader +] + Service = Union[Content, SymlinkLibrary, Scraper, Downloader, Symlinker, Updater] MediaItemGenerator = Generator[MediaItem, None, MediaItem | None] diff --git a/src/routers/secure/scrape.py b/src/routers/secure/scrape.py index 570c5f53..99ab0d46 100644 --- a/src/routers/secure/scrape.py +++ b/src/routers/secure/scrape.py @@ -1,5 +1,6 @@ import asyncio from datetime import datetime, timedelta +import re from typing import Dict, List, Literal, Optional, TypeAlias, Union from uuid import uuid4 @@ -14,7 +15,6 @@ from program.media.item import Episode, MediaItem from program.media.stream import Stream as ItemStream from program.services.downloaders import Downloader -from program.services.downloaders.shared import hash_from_uri from program.services.indexers.trakt import TraktIndexer from program.services.scrapers import Scraping from program.services.scrapers.shared import rtn @@ -240,7 +240,15 @@ async def start_manual_session( magnet: str ) -> StartSessionResponse: session_manager.cleanup_expired(background_tasks) - info_hash = hash_from_uri(magnet).lower() + + def get_info_hash(magnet: str) -> Optional[str]: + pattern = r"[A-Fa-f0-9]{40}" + match = re.search(pattern, magnet) + return match.group(0) if match else None + + info_hash = get_info_hash(magnet) + if not info_hash: + raise HTTPException(status_code=400, detail="Invalid magnet URI") # Identify item based on IMDb or database ID if item_id.startswith("tt"): From 994f021c25a8e6556c2287c0f4d5052bf14600de Mon Sep 17 00:00:00 2001 From: Spoked Date: Sun, 24 Nov 2024 10:29:17 -0500 Subject: [PATCH 03/12] fix: fixed torbox id handling --- src/program/services/downloaders/__init__.py | 3 +-- src/program/services/downloaders/models.py | 18 +++++++++--------- src/program/services/downloaders/shared.py | 8 ++++---- src/program/services/downloaders/torbox.py | 11 +++++++++-- 4 files changed, 23 insertions(+), 17 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index ae1d07bc..00ac5e53 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -41,7 +41,6 @@ def run(self, item: MediaItem): logger.debug(f"Running downloader for {item.log_string} ({item.id})") chunk_size = 10 for i in range(0, len(item.streams), chunk_size): - logger.debug(f"Processing chunk {i} to {i + chunk_size} of {len(item.streams)} for {item.log_string}") chunk: List[Stream] = item.streams[i:i + chunk_size] response: List[TorrentContainer] =
self.get_instant_availability([stream.infohash for stream in chunk], item.type) for container in response: @@ -106,7 +105,7 @@ def get_instant_availability(self, infohashes: list[str], item_type: str) -> Lis """Check if the torrent is cached""" return self.service.get_instant_availability(infohashes, item_type) - def add_torrent(self, infohash: str) -> str: + def add_torrent(self, infohash: str) -> int: """Add a torrent by infohash""" return self.service.add_torrent(infohash) diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py index 9998064a..ff4ade67 100644 --- a/src/program/services/downloaders/models.py +++ b/src/program/services/downloaders/models.py @@ -45,17 +45,19 @@ class DebridFile(BaseModel): filesize: Optional[int] = None @classmethod - def create(cls, filename: str, filesize: int, filetype: str) -> Optional["DebridFile"]: + def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", "episode"]) -> Optional["DebridFile"]: """Factory method to validate and create a DebridFile""" if not any(filename.endswith(ext) for ext in VIDEO_EXTENSIONS) or "sample" in filename.lower(): return None + + filesize_mb = filesize_bytes / 1_000_000 if filetype == "movie": - if not (FILESIZE_MOVIE_CONSTRAINT[0] <= filesize <= FILESIZE_MOVIE_CONSTRAINT[1]): + if not (FILESIZE_MOVIE_CONSTRAINT[0] <= filesize_mb <= FILESIZE_MOVIE_CONSTRAINT[1]): return None elif filetype == "episode": - if not (FILESIZE_EPISODE_CONSTRAINT[0] <= filesize <= FILESIZE_EPISODE_CONSTRAINT[1]): + if not (FILESIZE_EPISODE_CONSTRAINT[0] <= filesize_mb <= FILESIZE_EPISODE_CONSTRAINT[1]): return None - return cls(filename=filename, filesize=filesize) + return cls(filename=filename, filesize=filesize_bytes) class ParsedFileData(BaseModel): @@ -78,14 +80,12 @@ def cached(self) -> bool: class TorrentInfo(BaseModel): """Torrent information from a debrid service""" - id: str - infohash: str = Field(default=None) - filename: str = Field(default=None) + id: int + name: str status: str = Field(default=None) + infohash: str = Field(default=None) progress: float = Field(default=None) bytes: int = Field(default=None) - speed: int = Field(default=None) - seeders: int = Field(default=None) created_at: datetime = Field(default=None) expires_at: datetime = Field(default=None) completed_at: datetime = Field(default=None) diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py index b796cf9b..e7ff5395 100644 --- a/src/program/services/downloaders/shared.py +++ b/src/program/services/downloaders/shared.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from datetime import datetime from typing import List -from RTN import parse +from RTN import ParsedData, parse from program.services.downloaders.models import ParsedFileData, TorrentInfo, TorrentContainer @@ -33,7 +33,7 @@ def get_instant_availability(self, infohashes: List[str]) -> List[TorrentContain pass @abstractmethod - def add_torrent(self, infohash: str) -> str: + def add_torrent(self, infohash: str) -> int: """ Add a torrent by infohash and return its ID @@ -81,8 +81,8 @@ def delete_torrent(self, torrent_id: str) -> None: def parse_filename(filename: str) -> ParsedFileData: """Parse a filename into a ParsedFileData object""" - parsed_data = parse(filename) - season = parsed_data.seasons[0] if parsed_data.seasons else None + parsed_data: ParsedData = parse(filename) + season: int | None = parsed_data.seasons[0] if parsed_data.seasons else None return
ParsedFileData(item_type=parsed_data.type, season=season, episodes=parsed_data.episodes) diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py index 55016508..ff0b2f46 100644 --- a/src/program/services/downloaders/torbox.py +++ b/src/program/services/downloaders/torbox.py @@ -166,9 +166,16 @@ def select_files(self, *args) -> None: pass def get_torrent_info(self, torrent_id: str) -> TorrentInfo: - """Get information about a torrent""" + """Get information about a torrent using a torrent ID""" try: - return self.api.request_handler.execute(HttpMethod.GET, f"torrents/torrentinfo/{torrent_id}")['data'] + data = self.api.request_handler.execute(HttpMethod.GET, f"torrents/mylist?id={torrent_id}")['data'] + return TorrentInfo( + id=data["id"], + name=data["name"], # points to dir + infohash=data["hash"], + status=data["download_state"], + bytes=data["size"] + ) except Exception as e: logger.error(f"Failed to get torrent info for {torrent_id}: {e}") raise From 1df3024048d0aac7fcba4e5aec9b4f56628c738f Mon Sep 17 00:00:00 2001 From: Spoked Date: Sun, 24 Nov 2024 11:11:50 -0500 Subject: [PATCH 04/12] fix: mediafusion incorrectly parsing titles. housekeeping downloader --- src/program/services/downloaders/__init__.py | 33 ++++++++++++-------- src/program/services/downloaders/models.py | 2 +- src/program/services/scrapers/mediafusion.py | 2 +- 3 files changed, 22 insertions(+), 15 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index 00ac5e53..86e1a6cc 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -43,35 +43,42 @@ def run(self, item: MediaItem): for i in range(0, len(item.streams), chunk_size): chunk: List[Stream] = item.streams[i:i + chunk_size] response: List[TorrentContainer] = self.get_instant_availability([stream.infohash for stream in chunk], item.type) - for container in response: - stream: Stream = next((s for s in chunk if s.infohash == container.infohash), None) + for stream in chunk: + container: TorrentContainer = next((c for c in response if c.infohash == stream.infohash), None) download_result = None try: - if not container.cached: + if container and container.cached: + # Handle cached streams + download_result: DownloadedTorrent = self.download_cached_stream(stream, container) + else: + # Handle uncached streams (not implemented) raise NotCachedException("Not cached!") - download_result: DownloadedTorrent = self.download_cached_stream(stream, container) if download_result: logger.log("DEBRID", f"Downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") - if not self.update_item_attributes(item, download_result): - raise NoMatchingFilesException("No matching files found!") - break + if not self.update_item_attributes(item, download_result): + raise NoMatchingFilesException("No matching files found!") + break except Exception as e: logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") - if download_result and download_result.torrent_id: - self.service.delete_torrent(download_result.torrent_id) + if download_result and download_result.id: + self.service.delete_torrent(download_result.id) item.blacklist_stream(stream) yield item def download_cached_stream(self, stream: Stream, container: TorrentContainer) -> DownloadedTorrent: """Download a cached stream""" - torrent_id: str = self.add_torrent(stream.infohash) + torrent_id: int = self.add_torrent(stream.infohash) info: TorrentInfo = 
self.get_torrent_info(torrent_id) self.select_files(torrent_id, container) - return DownloadedTorrent(id=torrent_id, info=info, infohash=container.infohash, container=container) + return DownloadedTorrent(id=torrent_id, info=info, infohash=stream.infohash, container=container) + + def download_uncached_stream(self, stream: Stream) -> DownloadedTorrent: + """Download an uncached stream""" + pass def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool: """Update the item attributes with the downloaded files and active stream""" - if not any(download_result.infohash, download_result.info.id, download_result.info.filename): + if not any(download_result.infohash, download_result.info.id, download_result.info.name): return False item = item found = False @@ -97,7 +104,7 @@ def update_item_attributes(self, item: MediaItem, download_result: DownloadedTor def _update_attributes(self, item: Union[Movie, Episode], debrid_file: DebridFile, download_result: DownloadedTorrent) -> None: """Update the item attributes with the downloaded files and active stream""" item.file = debrid_file.filename - item.folder = download_result.info.filename + item.folder = download_result.info.name item.alternative_folder = download_result.info.alternative_filename item.active_stream = {"infohash": download_result.infohash, "id": download_result.info.id} diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py index ff4ade67..6684366c 100644 --- a/src/program/services/downloaders/models.py +++ b/src/program/services/downloaders/models.py @@ -99,7 +99,7 @@ def size_mb(self) -> float: class DownloadedTorrent(BaseModel): """Represents the result of a download operation""" - id: str + id: int infohash: str container: TorrentContainer info: TorrentInfo diff --git a/src/program/services/scrapers/mediafusion.py b/src/program/services/scrapers/mediafusion.py index 64553ca0..5a5753b6 100644 --- a/src/program/services/scrapers/mediafusion.py +++ b/src/program/services/scrapers/mediafusion.py @@ -146,7 +146,7 @@ def scrape(self, item: MediaItem) -> tuple[Dict[str, str], int]: if not hasattr(stream, "description") and hasattr(stream, "title") and "rate-limit exceeded" in stream.title: raise RateLimitExceeded(f"Mediafusion rate-limit exceeded for item: {item.log_string}") description_split = stream.description.replace("📂 ", "") - raw_title = description_split.split("/")[0] or description_split.split("\n")[0] # we want the torrent name if possible + raw_title = description_split.split("\n")[0] info_hash = re.search(r"info_hash=([A-Za-z0-9]+)", stream.url).group(1) if info_hash and info_hash not in torrents: torrents[info_hash] = raw_title From 62c33de890f91a0744c0f1f44a6e272ffa6eac1b Mon Sep 17 00:00:00 2001 From: Spoked Date: Sun, 24 Nov 2024 13:39:09 -0500 Subject: [PATCH 05/12] fix: add global timeout of 15s --- src/program/managers/event_manager.py | 92 +++++++++++++++++++- src/program/services/downloaders/__init__.py | 3 +- src/program/services/downloaders/models.py | 4 +- src/program/services/downloaders/torbox.py | 11 ++- src/program/utils/request.py | 3 + 5 files changed, 105 insertions(+), 8 deletions(-) diff --git a/src/program/managers/event_manager.py b/src/program/managers/event_manager.py index 9c932de4..e4a9e767 100644 --- a/src/program/managers/event_manager.py +++ b/src/program/managers/event_manager.py @@ -1,5 +1,7 @@ import os +import sys import threading +import time import traceback from concurrent.futures import Future, 
ThreadPoolExecutor from datetime import datetime @@ -170,7 +172,6 @@ def submit_job(self, service, program, event=None): item (Event, optional): The event item to process. Defaults to None. """ log_message = f"Submitting service {service.__name__} to be executed" - item_id = None # Content services dont provide an event. if event: log_message += f" with {event.log_message}" @@ -186,6 +187,95 @@ def submit_job(self, service, program, event=None): sse_manager.publish_event("event_update", self.get_event_updates()) future.add_done_callback(lambda f:self._process_future(f, service)) + # For debugging purposes we can monitor the execution time of the service. (comment out above and uncomment below) + # def submit_job(self, service, program, event=None): + # """ + # Submits a job to be executed by the service. + + # Args: + # service (type): The service class to execute. + # program (Program): The program containing the service. + # item (Event, optional): The event item to process. Defaults to None. + # """ + # log_message = f"Submitting service {service.__name__} to be executed" + # if event: + # log_message += f" with {event.log_message}" + # logger.debug(log_message) + + # cancellation_event = threading.Event() + # executor = self._find_or_create_executor(service) + + # # Add start time to track execution duration + # start_time = datetime.now() + + # def _monitor_execution(future): + # """Monitor execution time and log if taking too long""" + # while not future.done(): + # execution_time = (datetime.now() - start_time).total_seconds() + # if execution_time > 180: # 3 minutes + # current_thread = None + # for thread in threading.enumerate(): + # if thread.name.startswith(service.__name__) and not thread.name.endswith('_monitor'): + # current_thread = thread + # break + + # if current_thread: + # # Get stack frames for the worker thread + # frames = sys._current_frames() + # thread_frame = None + # for thread_id, frame in frames.items(): + # if thread_id == current_thread.ident: + # thread_frame = frame + # break + + # if thread_frame: + # stack_trace = ''.join(traceback.format_stack(thread_frame)) + # else: + # stack_trace = "Could not get stack trace for worker thread" + # else: + # stack_trace = "Could not find worker thread" + + # logger.warning( + # f"Service {service.__name__} execution taking longer than 3 minutes!\n" + # f"Event: {event.log_message if event else 'No event'}\n" + # f"Execution time: {execution_time:.1f} seconds\n" + # f"Thread name: {current_thread.name if current_thread else 'Unknown'}\n" + # f"Thread alive: {current_thread.is_alive() if current_thread else 'Unknown'}\n" + # f"Stack trace:\n{stack_trace}" + # ) + + # # Cancel the future and kill the thread + # future.cancellation_event.set() + # future.cancel() + # if current_thread: + # logger.warning(f"Killing thread {current_thread.name} due to timeout") + # self._futures.remove(future) + # if event: + # self.remove_event_from_running(event) + # return # Exit the monitoring thread + + # time.sleep(60) # Check every minute + + # future = executor.submit(db_functions.run_thread_with_db_item, + # program.all_services[service].run, + # service, program, event, cancellation_event) + + # # Start monitoring thread + # monitor_thread = threading.Thread( + # target=_monitor_execution, + # args=(future,), + # name=f"{service.__name__}_monitor", + # daemon=True + # ) + # monitor_thread.start() + + # future.cancellation_event = cancellation_event + # if event: + # future.event = event + # self._futures.append(future) + # 
sse_manager.publish_event("event_update", self.get_event_updates()) + # future.add_done_callback(lambda f: self._process_future(f, service)) + def cancel_job(self, item_id: str, suppress_logs=False): """ Cancels a job associated with the given item. diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index 86e1a6cc..c210ff8b 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -78,7 +78,8 @@ def download_uncached_stream(self, stream: Stream) -> DownloadedTorrent: def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool: """Update the item attributes with the downloaded files and active stream""" - if not any(download_result.infohash, download_result.info.id, download_result.info.name): + if not download_result.container: + logger.error(f"No container found for {item.log_string} ({item.id})") return False item = item found = False diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py index 6684366c..cd15bf5e 100644 --- a/src/program/services/downloaders/models.py +++ b/src/program/services/downloaders/models.py @@ -63,8 +63,8 @@ def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", " class ParsedFileData(BaseModel): """Represents a parsed file from a filename""" item_type: Literal["movie", "show"] - season: int = Field(default=None) - episodes: list[int] = Field(default_factory=list) + season: Optional[int] = Field(default=None) + episodes: Optional[List[int]] = Field(default_factory=list) class TorrentContainer(BaseModel): diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py index ff0b2f46..0a474b32 100644 --- a/src/program/services/downloaders/torbox.py +++ b/src/program/services/downloaders/torbox.py @@ -53,6 +53,7 @@ class TorBoxAPI: def __init__(self, api_key: str, proxy_url: Optional[str] = None): self.api_key = api_key rate_limit_params = get_rate_limit_params(per_second=5) + self.timeout = 60 self.session = create_service_session(rate_limit_params=rate_limit_params) self.session.headers.update({"Authorization": f"Bearer {api_key}"}) if proxy_url: @@ -117,9 +118,11 @@ def get_instant_availability(self, infohashes: List[str], item_type: str) -> Lis results = [] for attempt in range(self.MAX_RETRIES): try: + hash_string = ','.join(infohashes) response = self.api.request_handler.execute( HttpMethod.GET, - f"torrents/checkcached?hash={','.join(infohashes)}&format=list&list_files=true" + f"torrents/checkcached?hash={hash_string}&format=list&list_files=true", + timeout=30 ) data: list = response["data"] @@ -180,10 +183,10 @@ def get_torrent_info(self, torrent_id: str) -> TorrentInfo: logger.error(f"Failed to get torrent info for {torrent_id}: {e}") raise - def delete_torrent(self, torrent_id: str) -> None: + def delete_torrent(self, torrent_id: int) -> None: """Delete a torrent""" try: - self.api.request_handler.execute(HttpMethod.POST, f"torrents/controltorrent", data={"torrent_id": torrent_id, "operation": "delete"}) + self.api.request_handler.execute(HttpMethod.POST, f"torrents/controltorrent", json={"torrent_id": str(torrent_id), "operation": "delete"}, timeout=15) except Exception as e: - logger.error(f"Failed to delete torrent {torrent_id}: {e}") + logger.error(f"Failed to delete torrent id {torrent_id}: {e}") raise diff --git a/src/program/utils/request.py b/src/program/utils/request.py index fde7d267..9505295a 100644 
--- a/src/program/utils/request.py +++ b/src/program/utils/request.py @@ -126,6 +126,7 @@ def __init__(self, session: Session | LimiterSession, response_type: ResponseTyp self.BASE_REQUEST_PARAMS = base_params or BaseRequestParameters() self.custom_exception = custom_exception or Exception self.request_logging = request_logging + self.timeout = 15 def _request(self, method: HttpMethod, endpoint: str, ignore_base_url: Optional[bool] = None, overriden_response_type: ResponseType = None, **kwargs) -> ResponseObject: """Generic request handler with error handling, using kwargs for flexibility. @@ -147,6 +148,8 @@ def _request(self, method: HttpMethod, endpoint: str, ignore_base_url: Optional[ elif 'params' in kwargs and not kwargs['params']: del kwargs['params'] + kwargs.setdefault('timeout', self.timeout) + if self.request_logging: logger.debug(f"Making request to {url} with kwargs: {kwargs}") From bea85a33a41f87313f162cb1c366a8d67457bdc6 Mon Sep 17 00:00:00 2001 From: Spoked Date: Mon, 25 Nov 2024 01:44:05 -0500 Subject: [PATCH 06/12] fix: fixed rd instantavail endpoint --- src/program/services/downloaders/__init__.py | 136 ++++++++++++------ src/program/services/downloaders/models.py | 18 ++- .../services/downloaders/realdebrid.py | 70 ++++++++- src/program/services/downloaders/torbox.py | 10 +- src/program/utils/request.py | 2 +- 5 files changed, 175 insertions(+), 61 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index c210ff8b..e8fd4f3f 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -1,4 +1,5 @@ -from typing import List, Union +import time +from typing import List, Literal, Union from loguru import logger from program.media.item import MediaItem, Show, Season, Episode, Movie @@ -38,70 +39,119 @@ def validate(self): return True def run(self, item: MediaItem): - logger.debug(f"Running downloader for {item.log_string} ({item.id})") + logger.debug(f"Starting download process for {item.log_string} ({item.id})") chunk_size = 10 + download_success = False + for i in range(0, len(item.streams), chunk_size): chunk: List[Stream] = item.streams[i:i + chunk_size] - response: List[TorrentContainer] = self.get_instant_availability([stream.infohash for stream in chunk], item.type) - for stream in chunk: - container: TorrentContainer = next((c for c in response if c.infohash == stream.infohash), None) - download_result = None + valid_streams = self.validate_streams(chunk, item) + if not valid_streams: + logger.debug(f"No valid streams found in chunk {i // chunk_size}-{i + chunk_size} for {item.log_string} ({item.id}).") + continue + + for stream, container in valid_streams: try: - if container and container.cached: - # Handle cached streams - download_result: DownloadedTorrent = self.download_cached_stream(stream, container) - else: - # Handle uncached streams (not implemented) - raise NotCachedException("Not cached!") - if download_result: - logger.log("DEBRID", f"Downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") - if not self.update_item_attributes(item, download_result): - raise NoMatchingFilesException("No matching files found!") + download_result = self.download_cached_stream(stream, container) + if self.update_item_attributes(item, download_result): + logger.log("DEBRID", f"Successfully downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") + download_success = True break + else: + raise 
NoMatchingFilesException(f"No valid files found for stream {stream.infohash}") except Exception as e: - logger.debug(f"Invalid stream: {stream.infohash} - reason: {e}") - if download_result and download_result.id: + logger.debug(f"Stream {stream.infohash} failed: {e}") + if 'download_result' in locals() and download_result.id: self.service.delete_torrent(download_result.id) item.blacklist_stream(stream) + + if download_success: + break + + if not download_success: + logger.debug(f"Failed to download any streams for {item.log_string} ({item.id})") + yield item - def download_cached_stream(self, stream: Stream, container: TorrentContainer) -> DownloadedTorrent: - """Download a cached stream""" - torrent_id: int = self.add_torrent(stream.infohash) - info: TorrentInfo = self.get_torrent_info(torrent_id) - self.select_files(torrent_id, container) - return DownloadedTorrent(id=torrent_id, info=info, infohash=stream.infohash, container=container) + def validate_streams(self, streams: List[Stream], item: MediaItem) -> List[tuple[Stream, TorrentContainer]]: + """ + Validate streams by ensuring their files match the item's requirements. + """ + infohashes = [stream.infohash for stream in streams] + containers: List[TorrentContainer] = self.get_instant_availability(infohashes, item.type) + valid_streams = [] + + for stream in streams: + container = next((c for c in containers if c.infohash == stream.infohash and c.cached), None) + if not container: + logger.debug(f"Stream {stream.infohash} is not cached or valid.") + item.blacklist_stream(stream) + continue + + valid_files = [] + for file in container.files or []: + debrid_file = DebridFile.create( + filename=file.filename, + filesize_bytes=file.filesize, + filetype=item.type, + file_id=file.file_id + ) + if debrid_file: + valid_files.append(debrid_file) + + if valid_files: + container.files = valid_files + valid_streams.append((stream, container)) + else: + logger.debug(f"Stream {stream.infohash} has no valid files.") + item.blacklist_stream(stream) - def download_uncached_stream(self, stream: Stream) -> DownloadedTorrent: - """Download an uncached stream""" - pass + return valid_streams def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool: - """Update the item attributes with the downloaded files and active stream""" + """Update the item attributes with the downloaded files and active stream.""" if not download_result.container: logger.error(f"No container found for {item.log_string} ({item.id})") return False - item = item + found = False - container: List[DebridFile] = download_result.container.files - for file in container: + for file in download_result.container.files: file_data: ParsedFileData = parse_filename(file.filename) - if item.type == "movie" and file_data.item_type == "movie": - self._update_attributes(item, file, download_result) + if self.match_file_to_item(item, file_data, file, download_result): found = True break - elif item.type in ("show", "season", "episode"): - if not (file_data.season and file_data.episodes): - continue - show: Show = item if item.type == "show" else (item.parent if item.type == "season" else item.parent.parent) - season: Season = next((season for season in show.seasons if season.number == file_data.season), None) - for file_episode in file_data.episodes: - episode: Episode = next((episode for episode in season.episodes if episode.number == file_episode), None) - if episode and episode.state not in [States.Completed, States.Symlinked, States.Downloaded]: - 
self._update_attributes(episode, file, download_result) - found = True + return found + def match_file_to_item(self, item: MediaItem, file_data: ParsedFileData, file: DebridFile, download_result: DownloadedTorrent) -> bool: + """Check if the file matches the item and update attributes.""" + found = False + if item.type == "movie" and file_data.item_type == "movie": + self._update_attributes(item, file, download_result) + return True + + if item.type in ("show", "season", "episode"): + if not (file_data.season and file_data.episodes): + return False + + show: Show = item if item.type == "show" else (item.parent if item.type == "season" else item.parent.parent) + season: Season = next((season for season in show.seasons if season.number == file_data.season), None) + for file_episode in file_data.episodes: + episode: Episode = next((episode for episode in season.episodes if episode.number == file_episode), None) + if episode and episode.state not in [States.Completed, States.Symlinked, States.Downloaded]: + self._update_attributes(episode, file, download_result) + found = True + + return found + + def download_cached_stream(self, stream: Stream, container: TorrentContainer) -> DownloadedTorrent: + """Download a cached stream""" + torrent_id: int = self.add_torrent(stream.infohash) + info: TorrentInfo = self.get_torrent_info(torrent_id) + if container.file_ids: + self.select_files(torrent_id, container.file_ids) + return DownloadedTorrent(id=torrent_id, info=info, infohash=stream.infohash, container=container) + def _update_attributes(self, item: Union[Movie, Episode], debrid_file: DebridFile, download_result: DownloadedTorrent) -> None: """Update the item attributes with the downloaded files and active stream""" item.file = debrid_file.filename diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py index cd15bf5e..1c408762 100644 --- a/src/program/services/downloaders/models.py +++ b/src/program/services/downloaders/models.py @@ -1,5 +1,5 @@ from datetime import datetime -from typing import List, Literal, Optional +from typing import Dict, List, Literal, Optional, Union from loguru import logger from pydantic import BaseModel, Field from program.settings.manager import settings_manager @@ -41,11 +41,12 @@ class NoMatchingFilesException(Exception): class DebridFile(BaseModel): """Represents a file from a debrid service""" + file_id: Optional[int] = None filename: Optional[str] = None filesize: Optional[int] = None @classmethod - def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", "episode"]) -> Optional["DebridFile"]: + def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", "episode"], file_id: Optional[int] = None) -> Optional["DebridFile"]: """Factory method to validate and create a DebridFile""" if not any(filename.endswith(ext) for ext in VIDEO_EXTENSIONS) or "sample" in filename.lower(): return None @@ -57,7 +58,7 @@ def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", " elif filetype == "episode": if not (FILESIZE_EPISODE_CONSTRAINT[0] <= filesize_mb <= FILESIZE_EPISODE_CONSTRAINT[1]): return None - return cls(filename=filename, filesize=filesize_bytes) + return cls(filename=filename, filesize=filesize_bytes, file_id=file_id) class ParsedFileData(BaseModel): @@ -77,10 +78,14 @@ def cached(self) -> bool: """Check if the torrent is cached""" return len(self.files) > 0 + @property + def file_ids(self) -> List[int]: + """Get the file ids of the cached files""" +
return [file.file_id for file in self.files if file.file_id is not None] class TorrentInfo(BaseModel): """Torrent information from a debrid service""" - id: int + id: Union[int, str] name: str status: str = Field(default=None) infohash: str = Field(default=None) @@ -89,7 +94,8 @@ class TorrentInfo(BaseModel): created_at: datetime = Field(default=None) expires_at: datetime = Field(default=None) completed_at: datetime = Field(default=None) - alternative_filename: str = Field(default=None) # Real-Debrid only + alternative_filename: str = Field(default=None) + files: Dict[int, Dict[str, int | str]] = Field(default_factory=dict) # Real-Debrid only @property def size_mb(self) -> float: @@ -99,7 +105,7 @@ def size_mb(self) -> float: class DownloadedTorrent(BaseModel): """Represents the result of a download operation""" - id: int + id: Union[int, str] infohash: str container: TorrentContainer info: TorrentInfo diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py index 469dbda1..8d0622b9 100644 --- a/src/program/services/downloaders/realdebrid.py +++ b/src/program/services/downloaders/realdebrid.py @@ -1,5 +1,6 @@ from datetime import datetime from enum import Enum +import time from typing import List, Optional, Union from loguru import logger @@ -14,9 +15,10 @@ create_service_session, get_rate_limit_params, ) -from program.services.downloaders.models import TorrentContainer, TorrentInfo +from program.services.downloaders.models import DebridFile, TorrentContainer, TorrentInfo from .shared import DownloaderBase, premium_days_left +from program.services.downloaders.models import VIDEO_EXTENSIONS class RDTorrentStatus(str, Enum): @@ -128,8 +130,51 @@ def _validate_premium(self) -> bool: def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: """Get instant availability for multiple infohashes with retry logic""" - # Real-Debrid does not support instant availability anymore - return [] + # Real-Debrid does not support instant availability anymore so let's create a makeshift one!
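+ # A sketch of the flow (assumption: a cached torrent flips to "downloaded" immediately after
+ # its video files are selected): add each infohash as a torrent, select only the video files,
+ # re-read the torrent info, keep the files when the status is already "downloaded", and always
+ # delete the probe torrent in the finally block, since it was only added to inspect its files.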
+ containers: List[TorrentContainer] = [] + torrent_id = None + break_loop = False + for infohash in infohashes: + try: + # let's go over all the hashes and add them to see what files they have + torrent_id = self.add_torrent(infohash) + torrent_info = self.get_torrent_info(torrent_id) + if torrent_info.status == "waiting_files_selection": + ids = [ + file_id for file_id in torrent_info.files.keys() + if torrent_info.files[file_id]["filename"].endswith(tuple(ext.lower() for ext in VIDEO_EXTENSIONS)) + ] + if not ids: + logger.debug(f"No video files found in torrent {torrent_id} with infohash {infohash}") + continue + self.select_files(torrent_id, ids) + torrent_info = self.get_torrent_info(torrent_id) + if torrent_info.status != "downloaded": + # this isn't cached, so we skip it + logger.debug(f"Torrent {torrent_id} with infohash {infohash} is not cached") + continue + if torrent_info.files: + torrent_files = [ + file for file in ( + DebridFile.create(file_info["filename"], file_info["bytes"], item_type, file_id) + for file_id, file_info in torrent_info.files.items() + ) if file is not None + ] + if torrent_files: + container = TorrentContainer(infohash=infohash, files=torrent_files) + containers.append(container) + except Exception as e: + logger.error(f"Failed to get instant availability for {infohash}: {e}") + break_loop = True + finally: + # Delete the torrent because we don't need it anymore + # we just wanted to know what files are inside + if torrent_id: + self.delete_torrent(torrent_id) + torrent_id = None + if break_loop: + break + return containers def add_torrent(self, infohash: str) -> str: """Add a torrent by infohash""" @@ -145,14 +190,16 @@ def add_torrent(self, infohash: str) -> str: logger.error(f"Failed to add torrent {infohash}: {e}") raise - def select_files(self, torrent_id: str, files: TorrentContainer) -> None: + def select_files(self, torrent_id: str, ids: List[int] = None) -> None: """Select files from a torrent""" try: + selection = ",".join(str(file_id) for file_id in ids) if ids else "all" self.api.request_handler.execute( HttpMethod.POST, f"torrents/selectFiles/{torrent_id}", - data={"files": "all"} + data={"files": selection} ) + time.sleep(1) except Exception as e: logger.error(f"Failed to select files for torrent {torrent_id}: {e}") raise @@ -161,7 +208,18 @@ def get_torrent_info(self, torrent_id: str) -> TorrentInfo: """Get information about a torrent""" try: data = self.api.request_handler.execute(HttpMethod.GET, f"torrents/info/{torrent_id}") - return TorrentInfo(**data) + files = {file["id"]: {"filename": file["path"].split("/")[-1], "bytes": file["bytes"]} for file in data["files"]} + return TorrentInfo( + id=data["id"], + name=data["filename"], + status=data["status"], + infohash=data["hash"], + bytes=data["bytes"], + created_at=data["added"], + alternative_filename=data.get("original_filename", None), + progress=data.get("progress", None), + files=files, + ) except Exception as e: logger.error(f"Failed to get torrent info for {torrent_id}: {e}") raise diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py index 0a474b32..f72d8034 100644 --- a/src/program/services/downloaders/torbox.py +++ b/src/program/services/downloaders/torbox.py @@ -53,7 +53,6 @@ class TorBoxAPI: def __init__(self, api_key: str, proxy_url: Optional[str] = None): self.api_key = api_key rate_limit_params = get_rate_limit_params(per_second=5) - self.timeout = 60 self.session = create_service_session(rate_limit_params=rate_limit_params)
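# Note: per-request timeouts now come from BaseRequestHandler (15s default in request.py), so the API wrapper no longer keeps its own timeout attribute.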
self.session.headers.update({"Authorization": f"Bearer {api_key}"}) if proxy_url: @@ -122,7 +121,7 @@ def get_instant_availability(self, infohashes: List[str], item_type: str) -> Lis response = self.api.request_handler.execute( HttpMethod.GET, f"torrents/checkcached?hash={hash_string}&format=list&list_files=true", - timeout=30 + timeout=15 ) data: list = response["data"] @@ -157,7 +156,8 @@ def add_torrent(self, infohash: str) -> str: response = self.api.request_handler.execute( HttpMethod.POST, "torrents/createtorrent", - data={"magnet": magnet.lower()} + data={"magnet": magnet.lower()}, + timeout=15 ) return response["data"]["torrent_id"] except Exception as e: @@ -171,10 +171,10 @@ def select_files(self, *args) -> None: def get_torrent_info(self, torrent_id: str) -> TorrentInfo: """Get information about a torrent using a torrent ID""" try: - data = self.api.request_handler.execute(HttpMethod.GET, f"torrents/mylist?id={torrent_id}")['data'] + data = self.api.request_handler.execute(HttpMethod.GET, f"torrents/mylist?id={torrent_id}", timeout=15)['data'] return TorrentInfo( id=data["id"], - name=data["name"], # points to dir + name=data["name"].split("/")[-1], # points to dir infohash=data["hash"], status=data["download_state"], bytes=data["size"] diff --git a/src/program/utils/request.py b/src/program/utils/request.py index 9505295a..012b5062 100644 --- a/src/program/utils/request.py +++ b/src/program/utils/request.py @@ -148,7 +148,7 @@ def _request(self, method: HttpMethod, endpoint: str, ignore_base_url: Optional[ elif 'params' in kwargs and not kwargs['params']: del kwargs['params'] - kwargs.setdefault('timeout', self.timeout) + kwargs.setdefault("timeout", self.timeout) if self.request_logging: logger.debug(f"Making request to {url} with kwargs: {kwargs}") From 4f3bc51abab50b4cdb757754c995208688bef63a Mon Sep 17 00:00:00 2001 From: Gaisberg Date: Mon, 25 Nov 2024 16:26:41 +0200 Subject: [PATCH 07/12] fix: alldebrid --- src/program/services/downloaders/__init__.py | 7 +- src/program/services/downloaders/alldebrid.py | 490 +++++++++--------- src/program/services/downloaders/shared.py | 20 +- src/program/types.py | 9 +- 4 files changed, 264 insertions(+), 262 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index e8fd4f3f..332e199f 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -12,7 +12,7 @@ DownloadedTorrent, NoMatchingFilesException, NotCachedException ) -# from .alldebrid import AllDebridDownloader +from .alldebrid import AllDebridDownloader from .realdebrid import RealDebridDownloader from .torbox import TorBoxDownloader @@ -25,7 +25,7 @@ def __init__(self): self.services = { RealDebridDownloader: RealDebridDownloader(), TorBoxDownloader: TorBoxDownloader(), - # AllDebridDownloader: AllDebridDownloader() + AllDebridDownloader: AllDebridDownloader() } self.service = next((service for service in self.services.values() if service.initialized), None) self.initialized = self.validate() @@ -111,8 +111,7 @@ def validate_streams(self, streams: List[Stream], item: MediaItem) -> List[tuple def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool: """Update the item attributes with the downloaded files and active stream.""" if not download_result.container: - logger.error(f"No container found for {item.log_string} ({item.id})") - return False + raise NotCachedException(f"No container found for {item.log_string} ({item.id})") 
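+        # Raising here (instead of returning False) sends this case through the
+        # caller's exception handling, which blacklists the stream and cleans up the torrent.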
found = False for file in download_result.container.files: diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py index ae5da5d8..47ca6eb3 100644 --- a/src/program/services/downloaders/alldebrid.py +++ b/src/program/services/downloaders/alldebrid.py @@ -1,244 +1,246 @@ -# from datetime import datetime -# from typing import Dict, Iterator, List, Optional, Tuple - -# from loguru import logger -# from requests import Session -# from requests.exceptions import ConnectTimeout - -# from program.settings.manager import settings_manager -# from program.utils.request import ( -# BaseRequestHandler, -# BaseRequestParameters, -# HttpMethod, -# ResponseType, -# create_service_session, -# get_rate_limit_params, -# ) - -# from .shared import DownloaderBase, premium_days_left - - -# class AllDebridError(Exception): -# """Base exception for AllDebrid related errors""" - -# class AllDebridBaseRequestParameters(BaseRequestParameters): -# """AllDebrid base request parameters""" -# agent: Optional[str] = None - -# class AllDebridRequestHandler(BaseRequestHandler): -# def __init__(self, session: Session, base_url: str, base_params: AllDebridBaseRequestParameters, request_logging: bool = False): -# super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, base_params=base_params, custom_exception=AllDebridError, request_logging=request_logging) - -# def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> dict: -# response = super()._request(method, endpoint, **kwargs) -# if not response.is_ok or not response.data or "data" not in response.data: -# raise AllDebridError("Invalid response from AllDebrid") -# return response.data["data"] - -# class AllDebridAPI: -# """Handles AllDebrid API communication""" -# BASE_URL = "https://api.alldebrid.com/v4" -# AGENT = "Riven" - -# def __init__(self, api_key: str, proxy_url: Optional[str] = None): -# self.api_key = api_key -# rate_limit_params = get_rate_limit_params(per_minute=600, per_second=12) -# self.session = create_service_session(rate_limit_params=rate_limit_params) -# self.session.headers.update({ -# "Authorization": f"Bearer {api_key}" -# }) -# if proxy_url: -# self.session.proxies = {"http": proxy_url, "https": proxy_url} -# base_params = AllDebridBaseRequestParameters() -# base_params.agent = self.AGENT -# self.request_handler = AllDebridRequestHandler(self.session, self.BASE_URL, base_params) - - -# class AllDebridDownloader(DownloaderBase): -# """Main AllDebrid downloader class implementing DownloaderBase""" - -# def __init__(self): -# self.key = "alldebrid" -# self.settings = settings_manager.settings.downloaders.all_debrid -# self.api = None -# self.initialized = self.validate() - -# def validate(self) -> bool: -# """ -# Validate AllDebrid settings and premium status -# Required by DownloaderBase -# """ -# if not self._validate_settings(): -# return False - -# self.api = AllDebridAPI( -# api_key=self.settings.api_key, -# proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None -# ) - -# if not self._validate_premium(): -# return False - -# logger.success("AllDebrid initialized!") -# return True - -# def _validate_settings(self) -> bool: -# """Validate configuration settings""" -# if not self.settings.enabled: -# return False -# if not self.settings.api_key: -# logger.warning("AllDebrid API key is not set") -# return False -# if self.settings.proxy_enabled and not self.settings.proxy_url: -# logger.error("Proxy is enabled but no proxy URL is provided") -# 
return False -# return True - -# def _validate_premium(self) -> bool: -# """Validate premium status""" -# try: -# user_info = self.api.request_handler.execute(HttpMethod.GET, "user") -# user = user_info.get("user", {}) - -# if not user.get("isPremium", False): -# logger.error("Premium membership required") -# return False - -# expiration = datetime.utcfromtimestamp(user.get("premiumUntil", 0)) -# logger.log("DEBRID", premium_days_left(expiration)) -# return True - -# except ConnectTimeout: -# logger.error("Connection to AllDebrid timed out") -# except Exception as e: -# logger.error(f"Failed to validate premium status: {e}") -# return False - -# def get_instant_availability(self, infohashes: List[str]) -> Dict[str, list]: -# """ -# Get instant availability for multiple infohashes -# Required by DownloaderBase -# """ -# if not self.initialized: -# logger.error("Downloader not properly initialized") -# return {} - -# try: -# params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)} -# response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", **params) -# magnets = response.get("magnets", []) - -# availability = {} -# for magnet in magnets: -# if not isinstance(magnet, dict) or "files" not in magnet: -# continue - -# files = magnet.get("files", []) -# valid_files = self._process_files(files) - -# if valid_files: -# availability[magnet["hash"]] = [valid_files] - -# return availability - -# except Exception as e: -# logger.error(f"Failed to get instant availability: {e}") -# return {} - -# def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]: -# """Walks nested files structure and yields filename, size pairs""" -# dirs = [] -# for file in files: -# try: -# size = int(file.get("s", "")) -# yield file.get("n", "UNKNOWN"), size -# except ValueError: -# dirs.append(file) - -# for directory in dirs: -# yield from self._walk_files(directory.get("e", [])) - -# def _process_files(self, files: List[dict]) -> Dict[str, dict]: -# """Process and filter valid video files""" -# result = {} -# for i, (name, size) in enumerate(self._walk_files(files)): -# if ( -# any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS) -# and "sample" not in name.lower() -# ): -# result[str(i)] = {"filename": name, "filesize": size} -# return result - -# def add_torrent(self, infohash: str) -> str: -# """ -# Add a torrent by infohash -# Required by DownloaderBase -# """ -# if not self.initialized: -# raise AllDebridError("Downloader not properly initialized") - -# try: -# response = self.api.request_handler.execute( -# HttpMethod.GET, -# "magnet/upload", -# **{"magnets[]": infohash} -# ) -# magnet_info = response.get("magnets", [])[0] -# torrent_id = magnet_info.get("id") - -# if not torrent_id: -# raise AllDebridError("No torrent ID in response") - -# return str(torrent_id) - -# except Exception as e: -# logger.error(f"Failed to add torrent {infohash}: {e}") -# raise - -# def select_files(self, torrent_id: str, files: List[str]): -# """ -# Select files from a torrent -# Required by DownloaderBase -# """ -# if not self.initialized: -# raise AllDebridError("Downloader not properly initialized") - -# try: -# # AllDebrid doesn't have a separate file selection endpoint -# # All files are automatically selected when adding the torrent -# pass -# except Exception as e: -# logger.error(f"Failed to select files for torrent {torrent_id}: {e}") -# raise - -# def get_torrent_info(self, torrent_id: str) -> dict: -# """ -# Get information about a torrent -# Required by 
DownloaderBase -# """ -# if not self.initialized: -# raise AllDebridError("Downloader not properly initialized") - -# try: -# response = self.api.request_handler.execute(HttpMethod.GET, "magnet/status", id=torrent_id) -# info = response.get("magnets", {}) -# if "filename" not in info: -# raise AllDebridError("Invalid torrent info response") -# return info -# except Exception as e: -# logger.error(f"Failed to get torrent info for {torrent_id}: {e}") -# raise - -# def delete_torrent(self, torrent_id: str): -# """ -# Delete a torrent -# Required by DownloaderBase -# """ -# if not self.initialized: -# raise AllDebridError("Downloader not properly initialized") - -# try: -# self.api.request_handler.execute(HttpMethod.GET, "magnet/delete", id=torrent_id) -# except Exception as e: -# logger.error(f"Failed to delete torrent {torrent_id}: {e}") -# raise +from datetime import datetime +from typing import Dict, Iterator, List, Optional, Tuple + +from loguru import logger +from requests import Session +from requests.exceptions import ConnectTimeout + +from program.settings.manager import settings_manager +from program.utils.request import ( + BaseRequestHandler, + BaseRequestParameters, + HttpMethod, + ResponseType, + create_service_session, + get_rate_limit_params, +) +from program.services.downloaders.models import VIDEO_EXTENSIONS, DebridFile, TorrentContainer, TorrentInfo + +from .shared import DownloaderBase, premium_days_left + + +class AllDebridError(Exception): + """Base exception for AllDebrid related errors""" + +class AllDebridBaseRequestParameters(BaseRequestParameters): + """AllDebrid base request parameters""" + agent: Optional[str] = None + +class AllDebridRequestHandler(BaseRequestHandler): + def __init__(self, session: Session, base_url: str, base_params: AllDebridBaseRequestParameters, request_logging: bool = False): + super().__init__(session, response_type=ResponseType.DICT, base_url=base_url, base_params=base_params, custom_exception=AllDebridError, request_logging=request_logging) + + def execute(self, method: HttpMethod, endpoint: str, **kwargs) -> dict: + response = super()._request(method, endpoint, **kwargs) + if not response.is_ok or not response.data or "data" not in response.data: + raise AllDebridError("Invalid response from AllDebrid") + return response.data["data"] + +class AllDebridAPI: + """Handles AllDebrid API communication""" + BASE_URL = "https://api.alldebrid.com/v4" + AGENT = "Riven" + + def __init__(self, api_key: str, proxy_url: Optional[str] = None): + self.api_key = api_key + rate_limit_params = get_rate_limit_params(per_minute=600, per_second=12) + self.session = create_service_session(rate_limit_params=rate_limit_params) + self.session.headers.update({ + "Authorization": f"Bearer {api_key}" + }) + if proxy_url: + self.session.proxies = {"http": proxy_url, "https": proxy_url} + base_params = AllDebridBaseRequestParameters() + base_params.agent = self.AGENT + self.request_handler = AllDebridRequestHandler(self.session, self.BASE_URL, base_params) + + +class AllDebridDownloader(DownloaderBase): + """Main AllDebrid downloader class implementing DownloaderBase""" + + def __init__(self): + self.key = "alldebrid" + self.settings = settings_manager.settings.downloaders.all_debrid + self.api = None + self.initialized = self.validate() + + def validate(self) -> bool: + """ + Validate AllDebrid settings and premium status + Required by DownloaderBase + """ + if not self._validate_settings(): + return False + + self.api = AllDebridAPI( + api_key=self.settings.api_key, 
+ proxy_url=self.settings.proxy_url if self.settings.proxy_enabled else None + ) + + if not self._validate_premium(): + return False + + logger.success("AllDebrid initialized!") + return True + + def _validate_settings(self) -> bool: + """Validate configuration settings""" + if not self.settings.enabled: + return False + if not self.settings.api_key: + logger.warning("AllDebrid API key is not set") + return False + if self.settings.proxy_enabled and not self.settings.proxy_url: + logger.error("Proxy is enabled but no proxy URL is provided") + return False + return True + + def _validate_premium(self) -> bool: + """Validate premium status""" + try: + user_info = self.api.request_handler.execute(HttpMethod.GET, "user") + user = user_info.get("user", {}) + + if not user.get("isPremium", False): + logger.error("Premium membership required") + return False + + expiration = datetime.utcfromtimestamp(user.get("premiumUntil", 0)) + logger.log("DEBRID", premium_days_left(expiration)) + return True + + except ConnectTimeout: + logger.error("Connection to AllDebrid timed out") + except Exception as e: + logger.error(f"Failed to validate premium status: {e}") + return False + + def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: + """ + Get instant availability for multiple infohashes + Required by DownloaderBase + """ + if not self.initialized: + logger.error("Downloader not properly initialized") + return {} + + try: + params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)} + response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", params=params) + magnets = response.get("magnets", []) + + availability = [] + for magnet in magnets: + if not isinstance(magnet, dict) or "files" not in magnet: + continue + + files = magnet.get("files", []) + valid_files = self._process_files(files) + + if valid_files: + availability.append(TorrentContainer(infohash=magnet["hash"], files=valid_files)) + + return availability + + except Exception as e: + logger.error(f"Failed to get instant availability: {e}") + return {} + + def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]: + """Walks nested files structure and yields filename, size pairs""" + dirs = [] + for file in files: + try: + size = int(file.get("s", "")) + yield file.get("n", "UNKNOWN"), size + except ValueError: + dirs.append(file) + + for directory in dirs: + yield from self._walk_files(directory.get("e", [])) + + def _process_files(self, files: List[dict]) -> List[DebridFile]: + """Process and filter valid video files""" + result = [] + for i, (name, size) in enumerate(self._walk_files(files)): + if ( + any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS) + and "sample" not in name.lower() + ): + result.append(DebridFile(file_id=i, filename=name, filesize=size)) + return result + + def add_torrent(self, infohash: str) -> str: + """ + Add a torrent by infohash + Required by DownloaderBase + """ + if not self.initialized: + raise AllDebridError("Downloader not properly initialized") + + try: + response = self.api.request_handler.execute( + HttpMethod.GET, + "magnet/upload", + params={"magnets[]": infohash} + ) + magnet_info = response.get("magnets", [])[0] + torrent_id = magnet_info.get("id") + + if not torrent_id: + raise AllDebridError("No torrent ID in response") + + return str(torrent_id) + + except Exception as e: + logger.error(f"Failed to add torrent {infohash}: {e}") + raise + + def select_files(self, torrent_id: str, _: List[str] = 
None) -> None:
+        """
+        Select files from a torrent
+        Required by DownloaderBase
+        """
+        try:
+            # AllDebrid doesn't have a separate file selection endpoint
+            # All files are automatically selected when adding the torrent
+            pass
+        except Exception as e:
+            logger.error(f"Failed to select files for torrent {torrent_id}: {e}")
+            raise
+
+    def get_torrent_info(self, torrent_id: str) -> TorrentInfo:
+        """
+        Get information about a torrent
+        Required by DownloaderBase
+        """
+        if not self.initialized:
+            raise AllDebridError("Downloader not properly initialized")
+
+        try:
+            response = self.api.request_handler.execute(HttpMethod.GET, "magnet/status", params={"id": torrent_id})
+            info = response.get("magnets", {})
+            if "filename" not in info:
+                raise AllDebridError("Invalid torrent info response")
+            return TorrentInfo(
+                id=info["id"],
+                name=info["filename"],
+                status=info["status"],
+                bytes=info["size"],
+                created_at=info["uploadDate"],
+                progress=info["downloaded"] / info["size"]
+            )
+        except Exception as e:
+            logger.error(f"Failed to get torrent info for {torrent_id}: {e}")
+            raise
+
+    def delete_torrent(self, torrent_id: str):
+        """
+        Delete a torrent
+        Required by DownloaderBase
+        """
+        try:
+            self.api.request_handler.execute(HttpMethod.GET, "magnet/delete", params={"id": torrent_id})
+        except Exception as e:
+            logger.error(f"Failed to delete torrent {torrent_id}: {e}")
+            raise
diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py
index e7ff5395..2c656051 100644
--- a/src/program/services/downloaders/shared.py
+++ b/src/program/services/downloaders/shared.py
@@ -13,20 +13,20 @@ class DownloaderBase(ABC):
     def validate(self) -> bool:
         """
         Validate the downloader configuration and premium status
-        
+
         Returns:
             ValidationResult: Contains validation status and any error messages
         """
         pass
 
     @abstractmethod
-    def get_instant_availability(self, infohashes: List[str]) -> List[TorrentContainer]:
+    def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]:
         """
         Get instant availability for multiple infohashes
-        
+
         Args:
             infohashes: List of torrent hashes to check
-        
+
         Returns:
             List[TorrentContainer]: Cached status and available files for each hash
         """
@@ -36,10 +36,10 @@ def get_instant_availability(self, infohashes: List[str]) -> List[TorrentContain
     def add_torrent(self, infohash: str) -> int:
         """
         Add a torrent and return its information
-        
+
         Args:
             infohash: The hash of the torrent to add
-        
+
         Returns:
             str: The ID of the added torrent
         """
@@ -49,7 +49,7 @@ def add_torrent(self, infohash: str) -> int:
     def select_files(self, request: list[int]) -> None:
         """
         Select which files to download from the torrent
-        
+
         Args:
             request: File selection details including torrent ID and file IDs
         """
@@ -59,10 +59,10 @@ def select_files(self, request: list[int]) -> None:
     def get_torrent_info(self, torrent_id: str) -> TorrentInfo:
         """
         Get information about a specific torrent using its ID
-        
+
         Args:
             torrent_id: ID of the torrent to get info for
-        
+
         Returns:
             TorrentInfo: Current information about the torrent
         """
@@ -72,7 +72,7 @@ def get_torrent_info(self, torrent_id: str) -> TorrentInfo:
     def delete_torrent(self, torrent_id: str) -> None:
         """
         Delete a torrent from the service
-        
+
         Args:
             torrent_id: ID of the torrent to delete
         """
diff --git a/src/program/types.py b/src/program/types.py
index d3fdcef1..d49c179c 100644
--- a/src/program/types.py
+++ b/src/program/types.py
@@ -11,8 +11,9 @@
     TraktContent,
 )
 from program.services.downloaders import (
-    # 
AllDebridDownloader, - RealDebridDownloader + AllDebridDownloader, + RealDebridDownloader, + TorBoxDownloader ) # TorBoxDownloader, @@ -36,8 +37,8 @@ Content = Union[Overseerr, PlexWatchlist, Listrr, Mdblist, TraktContent] Downloader = Union[ RealDebridDownloader, - # TorBoxDownloader, - # AllDebridDownloader + TorBoxDownloader, + AllDebridDownloader ] Service = Union[Content, SymlinkLibrary, Scraper, Downloader, Symlinker, Updater] From 805832d847abaedaed02f5d39475b87a5891f7be Mon Sep 17 00:00:00 2001 From: Spoked Date: Mon, 25 Nov 2024 15:21:33 -0500 Subject: [PATCH 08/12] fix: remove chunking on downloaders --- src/program/services/downloaders/__init__.py | 99 +++++++---------- src/program/services/downloaders/alldebrid.py | 27 +++-- src/program/services/downloaders/models.py | 8 +- .../services/downloaders/realdebrid.py | 103 ++++++++++-------- src/program/services/downloaders/shared.py | 11 +- src/program/services/downloaders/torbox.py | 41 ++++--- 6 files changed, 143 insertions(+), 146 deletions(-) diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index 332e199f..1063e8b6 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -1,5 +1,4 @@ -import time -from typing import List, Literal, Union +from typing import List, Optional, Union from loguru import logger from program.media.item import MediaItem, Show, Season, Episode, Movie @@ -40,73 +39,59 @@ def validate(self): def run(self, item: MediaItem): logger.debug(f"Starting download process for {item.log_string} ({item.id})") - chunk_size = 10 download_success = False - for i in range(0, len(item.streams), chunk_size): - chunk: List[Stream] = item.streams[i:i + chunk_size] - valid_streams = self.validate_streams(chunk, item) - if not valid_streams: - logger.debug(f"No valid streams found in chunk {i // chunk_size}-{i + chunk_size} for {item.log_string} ({item.id}).") + for stream in item.streams: + container = self.validate_stream(stream, item) + if not container: + logger.debug(f"Stream {stream.infohash} is not cached or valid.") continue - for stream, container in valid_streams: - try: - download_result = self.download_cached_stream(stream, container) - if self.update_item_attributes(item, download_result): - logger.log("DEBRID", f"Successfully downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") - download_success = True - break - else: - raise NoMatchingFilesException(f"No valid files found for stream {stream.infohash}") - except Exception as e: - logger.debug(f"Stream {stream.infohash} failed: {e}") - if 'download_result' in locals() and download_result.id: - self.service.delete_torrent(download_result.id) - item.blacklist_stream(stream) - - if download_success: - break + try: + download_result = self.download_cached_stream(stream, container) + if self.update_item_attributes(item, download_result): + logger.log("DEBRID", f"Downloaded {item.log_string} from '{stream.raw_title}' [{stream.infohash}]") + download_success = True + break + else: + raise NoMatchingFilesException(f"No valid files found for stream {stream.infohash}") + except Exception as e: + logger.debug(f"Stream {stream.infohash} failed: {e}") + if 'download_result' in locals() and download_result.id: + self.service.delete_torrent(download_result.id) + item.blacklist_stream(stream) if not download_success: logger.debug(f"Failed to download any streams for {item.log_string} ({item.id})") yield item - def validate_streams(self, streams: 
List[Stream], item: MediaItem) -> List[tuple[Stream, TorrentContainer]]:
+    def validate_stream(self, stream: Stream, item: MediaItem) -> Optional[TorrentContainer]:
         """
-        Validate streams by ensuring their files match the item's requirements.
+        Validate a single stream by ensuring its files match the item's requirements.
         """
-        infohashes = [stream.infohash for stream in streams]
-        containers: List[TorrentContainer] = self.get_instant_availability(infohashes, item.type)
-        valid_streams = []
-
-        for stream in streams:
-            container = next((c for c in containers if c.infohash == stream.infohash and c.cached), None)
-            if not container:
-                logger.debug(f"Stream {stream.infohash} is not cached or valid.")
-                item.blacklist_stream(stream)
-                continue
+        container = self.get_instant_availability(stream.infohash, item.type)
+        if not container:
+            item.blacklist_stream(stream)
+            return None
+
+        valid_files = []
+        for file in container.files or []:
+            debrid_file = DebridFile.create(
+                filename=file.filename,
+                filesize_bytes=file.filesize,
+                filetype=item.type,
+                file_id=file.file_id
+            )
+            if debrid_file:
+                valid_files.append(debrid_file)
 
-            valid_files = []
-            for file in container.files or []:
-                debrid_file = DebridFile.create(
-                    filename=file.filename,
-                    filesize_bytes=file.filesize,
-                    filetype=item.type,
-                    file_id=file.file_id
-                )
-                if debrid_file:
-                    valid_files.append(debrid_file)
-
-            if valid_files:
-                container.files = valid_files
-                valid_streams.append((stream, container))
-            else:
-                logger.debug(f"Stream {stream.infohash} has no valid files.")
-                item.blacklist_stream(stream)
+        if valid_files:
+            container.files = valid_files
+            return container
 
-        return valid_streams
+        item.blacklist_stream(stream)
+        return None
 
     def update_item_attributes(self, item: MediaItem, download_result: DownloadedTorrent) -> bool:
         """Update the item attributes with the downloaded files and active stream."""
@@ -158,9 +143,9 @@ def _update_attributes(self, item: Union[Movie, Episode], debrid_file: DebridFil
             item.alternative_folder = download_result.info.alternative_filename
         item.active_stream = {"infohash": download_result.infohash, "id": download_result.info.id}
 
-    def get_instant_availability(self, infohashes: list[str], item_type: str) -> List[TorrentContainer]:
+    def get_instant_availability(self, infohash: str, item_type: str) -> Optional[TorrentContainer]:
         """Check if the torrent is cached"""
-        return self.service.get_instant_availability(infohashes, item_type)
+        return self.service.get_instant_availability(infohash, item_type)
 
     def add_torrent(self, infohash: str) -> int:
         """Add a torrent by infohash"""
diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py
index 47ca6eb3..7d8a28b1 100644
--- a/src/program/services/downloaders/alldebrid.py
+++ b/src/program/services/downloaders/alldebrid.py
@@ -115,36 +115,35 @@ def _validate_premium(self) -> bool:
         logger.error(f"Failed to validate premium status: {e}")
         return False
 
-    def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]:
+    def get_instant_availability(self, infohash: str, item_type: str) -> Optional[TorrentContainer]:
         """
-        Get instant availability for multiple infohashes
+        Get instant availability for a single infohash
         Required by DownloaderBase
         """
         if not self.initialized:
             logger.error("Downloader not properly initialized")
-            return {}
+            return None
 
         try:
-            params = {f"magnets[{i}]": infohash for i, infohash in enumerate(infohashes)}
+            params = {"magnets[]": infohash}
             response = 
self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", params=params)
             magnets = response.get("magnets", [])
 
-            availability = []
-            for magnet in magnets:
-                if not isinstance(magnet, dict) or "files" not in magnet:
-                    continue
+            if not magnets or not isinstance(magnets[0], dict) or "files" not in magnets[0]:
+                return None
 
-                files = magnet.get("files", [])
-                valid_files = self._process_files(files)
+            magnet = magnets[0]
+            files = magnet.get("files", [])
+            valid_files = self._process_files(files)
 
-                if valid_files:
-                    availability.append(TorrentContainer(infohash=magnet["hash"], files=valid_files))
+            if valid_files:
+                return TorrentContainer(infohash=magnet["hash"], files=valid_files)
 
-            return availability
+            return None
 
         except Exception as e:
             logger.error(f"Failed to get instant availability: {e}")
-            return {}
+            return None
 
     def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]:
         """Walks nested files structure and yields filename, size pairs"""
diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py
index 1c408762..396203e3 100644
--- a/src/program/services/downloaders/models.py
+++ b/src/program/services/downloaders/models.py
@@ -41,9 +41,9 @@ class NoMatchingFilesException(Exception):
 
 class DebridFile(BaseModel):
     """Represents a file from a debrid service"""
-    file_id: Optional[int] = None
-    filename: Optional[str] = None
-    filesize: Optional[int] = None
+    file_id: Optional[int] = Field(default=None)
+    filename: Optional[str] = Field(default=None)
+    filesize: Optional[int] = Field(default=None)
 
     @classmethod
     def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", "episode"], file_id: Optional[int] = None) -> Optional["DebridFile"]:
@@ -58,6 +58,7 @@ def create(cls, filename: str, filesize_bytes: int, filetype: Literal["movie", "
         elif filetype == "episode":
             if not (FILESIZE_EPISODE_CONSTRAINT[0] <= filesize_mb <= FILESIZE_EPISODE_CONSTRAINT[1]):
                 return None
+
         return cls(filename=filename, filesize=filesize_bytes, file_id=file_id)
 
 
@@ -83,6 +84,7 @@ def file_ids(self) -> List[int]:
         """Get the file ids of the cached files"""
         return [file.file_id for file in self.files if file.file_id is not None]
 
+
 class TorrentInfo(BaseModel):
     """Torrent information from a debrid service"""
     id: Union[int, str]
diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py
index 8d0622b9..27ea422b 100644
--- a/src/program/services/downloaders/realdebrid.py
+++ b/src/program/services/downloaders/realdebrid.py
@@ -128,53 +128,64 @@ def _validate_premium(self) -> bool:
         logger.error(f"Failed to validate premium status: {e}")
         return False
 
-    def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]:
-        """Get instant availability for multiple infohashes with retry logic"""
-        # Real-Debrid does not support instant availability anymore, so let's create a makeshift one!
-        containers: List[TorrentContainer] = []
+    def get_instant_availability(self, infohash: str, item_type: str) -> Optional[TorrentContainer]:
+        """
+        Get instant availability for a single infohash.
+        Creates a makeshift availability check since Real-Debrid no longer supports instant availability. 
+ """ + valid_container: Optional[TorrentContainer] = None torrent_id = None - break_loop = False - for infohash in infohashes: - try: - # lets go over all the hashes and add them to see what files they have - torrent_id = self.add_torrent(infohash) - torrent_info = self.get_torrent_info(torrent_id) - if torrent_info.status == "waiting_files_selection": - ids = [ - file_id for file_id in torrent_info.files.keys() - if torrent_info.files[file_id]["filename"].endswith(tuple(ext.lower() for ext in VIDEO_EXTENSIONS)) - ] - if not ids: - logger.debug(f"No video files found in torrent {torrent_id} with infohash {infohash}") - continue - self.select_files(torrent_id, ids) - torrent_info = self.get_torrent_info(torrent_id) - if torrent_info.status != "downloaded": - # this isnt cached, so we skip it - logger.debug(f"Torrent {torrent_id} with infohash {infohash} is not cached") - continue - if torrent_info.files: - torrent_files = [ - file for file in ( - DebridFile.create(file_info["filename"], file_info["bytes"], item_type, file_id) - for file_id, file_info in torrent_info.files.items() - ) if file is not None - ] - if torrent_files: - container = TorrentContainer(infohash=infohash, files=torrent_files) - containers.append(container) - except Exception as e: - logger.error(f"Failed to get instant availability for {infohash}: {e}") - break_loop = True - finally: - # Delete the torrent because we don't need it anymore - # we just wanted to know what files are inside - if torrent_id: - self.delete_torrent(torrent_id) - torrent_id = None - if break_loop: - break - return containers + + try: + torrent_id = self.add_torrent(infohash) + container = self._process_torrent(torrent_id, infohash, item_type) + if container: + valid_container = container + except Exception as e: + logger.error(f"Failed to get instant availability for {infohash}: {e}") + finally: + if torrent_id is not None: + self.delete_torrent(torrent_id) + + return valid_container + + def _process_torrent(self, torrent_id: str, infohash: str, item_type: str) -> Optional[TorrentContainer]: + """Process a single torrent and return a TorrentContainer if valid.""" + torrent_info = self.get_torrent_info(torrent_id) + + if torrent_info.status == "waiting_files_selection": + video_file_ids = [ + file_id for file_id, file_info in torrent_info.files.items() + if file_info["filename"].endswith(tuple(ext.lower() for ext in VIDEO_EXTENSIONS)) + ] + + if not video_file_ids: + logger.debug(f"No video files found in torrent {torrent_id} with infohash {infohash}") + return None + + self.select_files(torrent_id, video_file_ids) + torrent_info = self.get_torrent_info(torrent_id) + + if torrent_info.status != "downloaded": + logger.debug(f"Torrent {torrent_id} with infohash {infohash} is not cached") + return None + + if not torrent_info.files: + return None + + torrent_files = [ + file for file in ( + DebridFile.create( + file_info["filename"], + file_info["bytes"], + item_type, + file_id + ) + for file_id, file_info in torrent_info.files.items() + ) if file is not None + ] + + return TorrentContainer(infohash=infohash, files=torrent_files) if torrent_files else None def add_torrent(self, infohash: str) -> str: """Add a torrent by infohash""" diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py index 2c656051..309813e8 100644 --- a/src/program/services/downloaders/shared.py +++ b/src/program/services/downloaders/shared.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod from datetime import datetime -from 
typing import List +from typing import List, Optional from RTN import ParsedData, parse from program.services.downloaders.models import ParsedFileData, TorrentInfo, TorrentContainer @@ -20,15 +20,16 @@ def validate(self) -> bool: pass @abstractmethod - def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: + def get_instant_availability(self, infohash: str, item_type: str) -> Optional[TorrentContainer]: """ - Get instant availability for multiple infohashes + Get instant availability for a single infohash Args: - infohashes: List of torrent hashes to check + infohash: The hash of the torrent to check + item_type: The type of media item being checked Returns: - List[TorrentContainer]: Cached status and available files for each hash + Optional[TorrentContainer]: Cached status and available files for the hash, or None if not available """ pass diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py index f72d8034..76881535 100644 --- a/src/program/services/downloaders/torbox.py +++ b/src/program/services/downloaders/torbox.py @@ -112,34 +112,33 @@ def _validate_premium(self) -> bool: logger.error(f"Failed to validate premium status: {e}") return False - def get_instant_availability(self, infohashes: List[str], item_type: str) -> List[TorrentContainer]: - """Get instant availability for multiple infohashes with retry logic""" - results = [] + def get_instant_availability(self, infohash: str, item_type: str) -> Optional[TorrentContainer]: + """Get instant availability for a single infohash with retry logic""" for attempt in range(self.MAX_RETRIES): try: - hash_string = ','.join(infohashes) response = self.api.request_handler.execute( HttpMethod.GET, - f"torrents/checkcached?hash={hash_string}&format=list&list_files=true", - timeout=15 + f"torrents/checkcached?hash={infohash}&format=list&list_files=true" ) data: list = response["data"] if not data: - return results - - for torrent in data: - files = [] - for file in torrent["files"]: - debrid_file = DebridFile.create(file["name"], file["size"], item_type) - if debrid_file: - files.append(debrid_file) - if files: - results.append(TorrentContainer( - infohash=torrent["hash"], - files=files - )) - return results + return None + + torrent = data[0] # We only expect one result since we're passing one hash + files = [] + for file in torrent["files"]: + debrid_file = DebridFile.create(file["name"], file["size"], item_type) + if debrid_file: + files.append(debrid_file) + + if files: + return TorrentContainer( + infohash=torrent["hash"], + files=files + ) + return None + except Exception as e: logger.debug(f"Failed to get instant availability (attempt {attempt + 1}/{self.MAX_RETRIES}): {e}") if attempt < self.MAX_RETRIES - 1: @@ -147,7 +146,7 @@ def get_instant_availability(self, infohashes: List[str], item_type: str) -> Lis continue logger.debug("All retry attempts failed for instant availability") - return [] + return None def add_torrent(self, infohash: str) -> str: """Add a torrent by infohash""" From 1b467cad4d233db44afff95732632b6adee5c0d1 Mon Sep 17 00:00:00 2001 From: Spoked Date: Tue, 26 Nov 2024 02:37:22 -0500 Subject: [PATCH 09/12] fix: check download state before downloading. 
added bucket limit feature --- poetry.lock | 667 +++++++++---------- src/program/program.py | 9 +- src/program/services/downloaders/__init__.py | 11 +- src/program/services/scrapers/shared.py | 10 +- src/program/settings/manager.py | 13 +- src/program/settings/models.py | 1 + src/program/types.py | 4 +- 7 files changed, 353 insertions(+), 362 deletions(-) diff --git a/poetry.lock b/poetry.lock index a1e44126..e3eb3aaa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -71,28 +71,27 @@ requests-oauthlib = "*" [[package]] name = "apscheduler" -version = "3.10.4" +version = "3.11.0" description = "In-process task scheduler with Cron-like capabilities" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"}, - {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"}, + {file = "APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da"}, + {file = "apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133"}, ] [package.dependencies] -pytz = "*" -six = ">=1.4.0" -tzlocal = ">=2.0,<3.dev0 || >=4.dev0" +tzlocal = ">=3.0" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] +doc = ["packaging", "sphinx", "sphinx-rtd-theme (>=1.3.0)"] +etcd = ["etcd3", "protobuf (<=3.21.0)"] gevent = ["gevent"] mongodb = ["pymongo (>=3.0)"] redis = ["redis (>=3.0)"] rethinkdb = ["rethinkdb (>=2.4.0)"] sqlalchemy = ["sqlalchemy (>=1.4)"] -testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +test = ["APScheduler[etcd,mongodb,redis,rethinkdb,sqlalchemy,tornado,zookeeper]", "PySide6", "anyio (>=4.5.2)", "gevent", "pytest", "pytz", "twisted"] tornado = ["tornado (>=4.3)"] twisted = ["twisted"] zookeeper = ["kazoo"] @@ -399,73 +398,73 @@ files = [ [[package]] name = "coverage" -version = "7.6.7" +version = "7.6.8" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e"}, - {file = "coverage-7.6.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45"}, - {file = "coverage-7.6.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1"}, - {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c"}, - {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2"}, - {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06"}, - {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777"}, - {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314"}, - {file = "coverage-7.6.7-cp310-cp310-win32.whl", hash = 
"sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a"}, - {file = "coverage-7.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163"}, - {file = "coverage-7.6.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469"}, - {file = "coverage-7.6.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99"}, - {file = "coverage-7.6.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec"}, - {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b"}, - {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a"}, - {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b"}, - {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d"}, - {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4"}, - {file = "coverage-7.6.7-cp311-cp311-win32.whl", hash = "sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2"}, - {file = "coverage-7.6.7-cp311-cp311-win_amd64.whl", hash = "sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f"}, - {file = "coverage-7.6.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9"}, - {file = "coverage-7.6.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b"}, - {file = "coverage-7.6.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c"}, - {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1"}, - {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354"}, - {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433"}, - {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f"}, - {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb"}, - {file = "coverage-7.6.7-cp312-cp312-win32.whl", hash = "sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76"}, - {file = "coverage-7.6.7-cp312-cp312-win_amd64.whl", hash = "sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c"}, - {file = "coverage-7.6.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3"}, - {file = 
"coverage-7.6.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab"}, - {file = "coverage-7.6.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808"}, - {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc"}, - {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8"}, - {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a"}, - {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55"}, - {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384"}, - {file = "coverage-7.6.7-cp313-cp313-win32.whl", hash = "sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30"}, - {file = "coverage-7.6.7-cp313-cp313-win_amd64.whl", hash = "sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42"}, - {file = "coverage-7.6.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413"}, - {file = "coverage-7.6.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd"}, - {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37"}, - {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b"}, - {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d"}, - {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529"}, - {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b"}, - {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3"}, - {file = "coverage-7.6.7-cp313-cp313t-win32.whl", hash = "sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8"}, - {file = "coverage-7.6.7-cp313-cp313t-win_amd64.whl", hash = "sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56"}, - {file = "coverage-7.6.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874"}, - {file = "coverage-7.6.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0"}, - {file = "coverage-7.6.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c"}, - {file = 
"coverage-7.6.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7"}, - {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3"}, - {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327"}, - {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c"}, - {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289"}, - {file = "coverage-7.6.7-cp39-cp39-win32.whl", hash = "sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c"}, - {file = "coverage-7.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13"}, - {file = "coverage-7.6.7-pp39.pp310-none-any.whl", hash = "sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671"}, - {file = "coverage-7.6.7.tar.gz", hash = "sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, + {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, + {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, + {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, + {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, + {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, + {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, + {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, + {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, + {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, + {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, + {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, + {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, + {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, + {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, + {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, + {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, + {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, + {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, + {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, + {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, + {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, + {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, + {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, + {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, + {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, + {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, + {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, + {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, + {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, + {file = 
"coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, + {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, + {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, + {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, ] [package.extras] @@ -1685,18 +1684,18 @@ files = [ [[package]] name = "pydantic" -version = "2.10.0" +version = "2.10.1" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic-2.10.0-py3-none-any.whl", hash = "sha256:5e7807ba9201bdf61b1b58aa6eb690916c40a47acfb114b1b4fef3e7fd5b30fc"}, - {file = "pydantic-2.10.0.tar.gz", hash = "sha256:0aca0f045ff6e2f097f1fe89521115335f15049eeb8a7bef3dafe4b19a74e289"}, + {file = "pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e"}, + {file = "pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.27.0" +pydantic-core = "2.27.1" typing-extensions = ">=4.12.2" [package.extras] @@ -1705,111 +1704,111 @@ timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.27.0" +version = "2.27.1" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" files = [ - {file = "pydantic_core-2.27.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:cd2ac6b919f7fed71b17fe0b4603c092a4c9b5bae414817c9c81d3c22d1e1bcc"}, - {file = "pydantic_core-2.27.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e015833384ca3e1a0565a79f5d953b0629d9138021c27ad37c92a9fa1af7623c"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db72e40628967f6dc572020d04b5f800d71264e0531c6da35097e73bdf38b003"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:df45c4073bed486ea2f18757057953afed8dd77add7276ff01bccb79982cf46c"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:836a4bfe0cc6d36dc9a9cc1a7b391265bf6ce9d1eb1eac62ac5139f5d8d9a6fa"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4bf1340ae507f6da6360b24179c2083857c8ca7644aab65807023cf35404ea8d"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ab325fc86fbc077284c8d7f996d904d30e97904a87d6fb303dce6b3de7ebba9"}, - {file = "pydantic_core-2.27.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1da0c98a85a6c6ed702d5556db3b09c91f9b0b78de37b7593e2de8d03238807a"}, - {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7b0202ebf2268954090209a84f9897345719e46a57c5f2c9b7b250ca0a9d3e63"}, - {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:35380671c3c921fe8adf31ad349dc6f7588b7e928dbe44e1093789734f607399"}, - {file = "pydantic_core-2.27.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b4c19525c3538fbc0bbda6229f9682fb8199ce9ac37395880e6952798e00373"}, - {file = "pydantic_core-2.27.0-cp310-none-win32.whl", hash = 
"sha256:333c840a1303d1474f491e7be0b718226c730a39ead0f7dab2c7e6a2f3855555"}, - {file = "pydantic_core-2.27.0-cp310-none-win_amd64.whl", hash = "sha256:99b2863c1365f43f74199c980a3d40f18a218fbe683dd64e470199db426c4d6a"}, - {file = "pydantic_core-2.27.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4523c4009c3f39d948e01962223c9f5538602e7087a628479b723c939fab262d"}, - {file = "pydantic_core-2.27.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:84af1cf7bfdcbc6fcf5a5f70cc9896205e0350306e4dd73d54b6a18894f79386"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e65466b31be1070b4a5b7dbfbd14b247884cb8e8b79c64fb0f36b472912dbaea"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a5c022bb0d453192426221605efc865373dde43b17822a264671c53b068ac20c"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6bb69bf3b6500f195c3deb69c1205ba8fc3cb21d1915f1f158a10d6b1ef29b6a"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aa4d1b2eba9a325897308b3124014a142cdccb9f3e016f31d3ebee6b5ea5e75"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e96ca781e0c01e32115912ebdf7b3fb0780ce748b80d7d28a0802fa9fbaf44e"}, - {file = "pydantic_core-2.27.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b872c86d8d71827235c7077461c502feb2db3f87d9d6d5a9daa64287d75e4fa0"}, - {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:82e1ad4ca170e8af4c928b67cff731b6296e6a0a0981b97b2eb7c275cc4e15bd"}, - {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:eb40f828bc2f73f777d1eb8fee2e86cd9692a4518b63b6b5aa8af915dfd3207b"}, - {file = "pydantic_core-2.27.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9a8fbf506fde1529a1e3698198fe64bfbe2e0c09557bc6a7dcf872e7c01fec40"}, - {file = "pydantic_core-2.27.0-cp311-none-win32.whl", hash = "sha256:24f984fc7762ed5f806d9e8c4c77ea69fdb2afd987b4fd319ef06c87595a8c55"}, - {file = "pydantic_core-2.27.0-cp311-none-win_amd64.whl", hash = "sha256:68950bc08f9735306322bfc16a18391fcaac99ded2509e1cc41d03ccb6013cfe"}, - {file = "pydantic_core-2.27.0-cp311-none-win_arm64.whl", hash = "sha256:3eb8849445c26b41c5a474061032c53e14fe92a11a5db969f722a2716cd12206"}, - {file = "pydantic_core-2.27.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8117839a9bdbba86e7f9df57018fe3b96cec934c3940b591b0fd3fbfb485864a"}, - {file = "pydantic_core-2.27.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a291d0b4243a259c8ea7e2b84eb9ccb76370e569298875a7c5e3e71baf49057a"}, - {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e35afd9e10b2698e6f2f32256678cb23ca6c1568d02628033a837638b3ed12"}, - {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:58ab0d979c969983cdb97374698d847a4acffb217d543e172838864636ef10d9"}, - {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d06b667e53320332be2bf6f9461f4a9b78092a079b8ce8634c9afaa7e10cd9f"}, - {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78f841523729e43e3928a364ec46e2e3f80e6625a4f62aca5c345f3f626c6e8a"}, - {file = 
"pydantic_core-2.27.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:400bf470e4327e920883b51e255617dfe4496d4e80c3fea0b5a5d0bf2c404dd4"}, - {file = "pydantic_core-2.27.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:951e71da6c89d354572098bada5ba5b5dc3a9390c933af8a614e37755d3d1840"}, - {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a51ce96224eadd1845150b204389623c8e129fde5a67a84b972bd83a85c6c40"}, - {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:483c2213a609e7db2c592bbc015da58b6c75af7360ca3c981f178110d9787bcf"}, - {file = "pydantic_core-2.27.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:359e7951f04ad35111b5ddce184db3391442345d0ab073aa63a95eb8af25a5ef"}, - {file = "pydantic_core-2.27.0-cp312-none-win32.whl", hash = "sha256:ee7d9d5537daf6d5c74a83b38a638cc001b648096c1cae8ef695b0c919d9d379"}, - {file = "pydantic_core-2.27.0-cp312-none-win_amd64.whl", hash = "sha256:2be0ad541bb9f059954ccf8877a49ed73877f862529575ff3d54bf4223e4dd61"}, - {file = "pydantic_core-2.27.0-cp312-none-win_arm64.whl", hash = "sha256:6e19401742ed7b69e51d8e4df3c03ad5ec65a83b36244479fd70edde2828a5d9"}, - {file = "pydantic_core-2.27.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:5f2b19b8d6fca432cb3acf48cf5243a7bf512988029b6e6fd27e9e8c0a204d85"}, - {file = "pydantic_core-2.27.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c86679f443e7085ea55a7376462553996c688395d18ef3f0d3dbad7838f857a2"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:510b11e9c3b1a852876d1ccd8d5903684336d635214148637ceb27366c75a467"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb704155e73b833801c247f39d562229c0303f54770ca14fb1c053acb376cf10"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ce048deb1e033e7a865ca384770bccc11d44179cf09e5193a535c4c2f497bdc"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58560828ee0951bb125c6f2862fbc37f039996d19ceb6d8ff1905abf7da0bf3d"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abb4785894936d7682635726613c44578c420a096729f1978cd061a7e72d5275"}, - {file = "pydantic_core-2.27.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2883b260f7a93235488699d39cbbd94fa7b175d3a8063fbfddd3e81ad9988cb2"}, - {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c6fcb3fa3855d583aa57b94cf146f7781d5d5bc06cb95cb3afece33d31aac39b"}, - {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:e851a051f7260e6d688267eb039c81f05f23a19431bd7dfa4bf5e3cb34c108cd"}, - {file = "pydantic_core-2.27.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edb1bfd45227dec8d50bc7c7d86463cd8728bcc574f9b07de7369880de4626a3"}, - {file = "pydantic_core-2.27.0-cp313-none-win32.whl", hash = "sha256:678f66462058dd978702db17eb6a3633d634f7aa0deaea61e0a674152766d3fc"}, - {file = "pydantic_core-2.27.0-cp313-none-win_amd64.whl", hash = "sha256:d28ca7066d6cdd347a50d8b725dc10d9a1d6a1cce09836cf071ea6a2d4908be0"}, - {file = "pydantic_core-2.27.0-cp313-none-win_arm64.whl", hash = "sha256:6f4a53af9e81d757756508b57cae1cf28293f0f31b9fa2bfcb416cc7fb230f9d"}, - {file = "pydantic_core-2.27.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = 
"sha256:e9f9feee7f334b72ceae46313333d002b56f325b5f04271b4ae2aadd9e993ae4"}, - {file = "pydantic_core-2.27.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:225bfff5d425c34e1fd562cef52d673579d59b967d9de06178850c4802af9039"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921ad596ff1a82f9c692b0758c944355abc9f0de97a4c13ca60ffc6d8dc15d4"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6354e18a9be37bfa124d6b288a87fb30c673745806c92956f1a25e3ae6e76b96"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ee4c2a75af9fe21269a4a0898c5425afb01af1f5d276063f57e2ae1bc64e191"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c91e3c04f5191fd3fb68764bddeaf02025492d5d9f23343b283870f6ace69708"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a6ebfac28fd51890a61df36ef202adbd77d00ee5aca4a3dadb3d9ed49cfb929"}, - {file = "pydantic_core-2.27.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36aa167f69d8807ba7e341d67ea93e50fcaaf6bc433bb04939430fa3dab06f31"}, - {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e8d89c276234579cd3d095d5fa2a44eb10db9a218664a17b56363cddf226ff3"}, - {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:5cc822ab90a70ea3a91e6aed3afac570b276b1278c6909b1d384f745bd09c714"}, - {file = "pydantic_core-2.27.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e15315691fe2253eb447503153acef4d7223dfe7e7702f9ed66539fcd0c43801"}, - {file = "pydantic_core-2.27.0-cp38-none-win32.whl", hash = "sha256:dfa5f5c0a4c8fced1422dc2ca7eefd872d5d13eb33cf324361dbf1dbfba0a9fe"}, - {file = "pydantic_core-2.27.0-cp38-none-win_amd64.whl", hash = "sha256:513cb14c0cc31a4dfd849a4674b20c46d87b364f997bbcb02282306f5e187abf"}, - {file = "pydantic_core-2.27.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:4148dc9184ab79e356dc00a4199dc0ee8647973332cb385fc29a7cced49b9f9c"}, - {file = "pydantic_core-2.27.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5fc72fbfebbf42c0856a824b8b0dc2b5cd2e4a896050281a21cfa6fed8879cb1"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:185ef205256cd8b38431205698531026979db89a79587725c1e55c59101d64e9"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:395e3e1148fa7809016231f8065f30bb0dc285a97b4dc4360cd86e17bab58af7"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33d14369739c5d07e2e7102cdb0081a1fa46ed03215e07f097b34e020b83b1ae"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e7820bb0d65e3ce1e3e70b6708c2f66143f55912fa02f4b618d0f08b61575f12"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43b61989068de9ce62296cde02beffabcadb65672207fc51e7af76dca75e6636"}, - {file = "pydantic_core-2.27.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15e350efb67b855cd014c218716feea4986a149ed1f42a539edd271ee074a196"}, - {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:433689845288f9a1ee5714444e65957be26d30915f7745091ede4a83cfb2d7bb"}, - {file = 
"pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:3fd8bc2690e7c39eecdf9071b6a889ce7b22b72073863940edc2a0a23750ca90"}, - {file = "pydantic_core-2.27.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:884f1806609c2c66564082540cffc96868c5571c7c3cf3a783f63f2fb49bd3cd"}, - {file = "pydantic_core-2.27.0-cp39-none-win32.whl", hash = "sha256:bf37b72834e7239cf84d4a0b2c050e7f9e48bced97bad9bdf98d26b8eb72e846"}, - {file = "pydantic_core-2.27.0-cp39-none-win_amd64.whl", hash = "sha256:31a2cae5f059329f9cfe3d8d266d3da1543b60b60130d186d9b6a3c20a346361"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4fb49cfdb53af5041aba909be00cccfb2c0d0a2e09281bf542371c5fd36ad04c"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:49633583eb7dc5cba61aaf7cdb2e9e662323ad394e543ee77af265736bcd3eaa"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:153017e3d6cd3ce979de06d84343ca424bb6092727375eba1968c8b4693c6ecb"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff63a92f6e249514ef35bc795de10745be0226eaea06eb48b4bbeaa0c8850a4a"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5982048129f40b082c2654de10c0f37c67a14f5ff9d37cf35be028ae982f26df"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:91bc66f878557313c2a6bcf396e7befcffe5ab4354cfe4427318968af31143c3"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:68ef5377eb582fa4343c9d0b57a5b094046d447b4c73dd9fbd9ffb216f829e7d"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:c5726eec789ee38f2c53b10b1821457b82274f81f4f746bb1e666d8741fcfadb"}, - {file = "pydantic_core-2.27.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c0c431e4be5c1a0c6654e0c31c661cd89e0ca956ef65305c3c3fd96f4e72ca39"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8e21d927469d04b39386255bf00d0feedead16f6253dcc85e9e10ddebc334084"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b51f964fcbb02949fc546022e56cdb16cda457af485e9a3e8b78ac2ecf5d77e"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25a7fd4de38f7ff99a37e18fa0098c3140286451bc823d1746ba80cec5b433a1"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fda87808429c520a002a85d6e7cdadbf58231d60e96260976c5b8f9a12a8e13"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a150392102c402c538190730fda06f3bce654fc498865579a9f2c1d2b425833"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c9ed88b398ba7e3bad7bd64d66cc01dcde9cfcb7ec629a6fd78a82fa0b559d78"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:9fe94d9d2a2b4edd7a4b22adcd45814b1b59b03feb00e56deb2e89747aec7bfe"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d8b5ee4ae9170e2775d495b81f414cc20268041c42571530513496ba61e94ba3"}, - {file = "pydantic_core-2.27.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d29e235ce13c91902ef3efc3d883a677655b3908b1cbc73dee816e5e1f8f7739"}, - {file = 
"pydantic_core-2.27.0.tar.gz", hash = "sha256:f57783fbaf648205ac50ae7d646f27582fc706be3977e87c3c124e7a92407b10"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, + {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, + {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = 
"sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, + {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, + {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, + {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, ] [package.dependencies] @@ -1968,17 +1967,6 @@ files = [ [package.extras] cli = ["click (>=5.0)"] -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - [[package]] name = "pywin32" version = "308" @@ -2070,13 +2058,13 @@ files = [ [[package]] name = "rank-torrent-name" -version = "1.5.4" +version = "1.6.0" description = "Rank filenames based on user-defined rules!" 
optional = false python-versions = "<4.0,>=3.11" files = [ - {file = "rank_torrent_name-1.5.4-py3-none-any.whl", hash = "sha256:6b97c3ddb9eca5418df026530e03ed3de47cfe2463cc33164e47b0c3aa823805"}, - {file = "rank_torrent_name-1.5.4.tar.gz", hash = "sha256:fe8575347a33a3aa03b0c115fe28ee563ca39deea0572bde208d934de2d19e96"}, + {file = "rank_torrent_name-1.6.0-py3-none-any.whl", hash = "sha256:5c6258dcd99f12d39839301b797899bc44a09b671ab646c30292e6c7a85bf26e"}, + {file = "rank_torrent_name-1.6.0.tar.gz", hash = "sha256:5b5b777245a7b61851ef098166d34e044a78683b973f69ad59971f8809c57002"}, ] [package.dependencies] @@ -3029,94 +3017,82 @@ test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", [[package]] name = "watchfiles" -version = "0.24.0" +version = "1.0.0" description = "Simple, modern and high performance file watching and code reload in python." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, - {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, - {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, - {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, - {file = 
"watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, - {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, - {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, - {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, - {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, - {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, - {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, - {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, - {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, - {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, - {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, - {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, - {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, - {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, - {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, - {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, - {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, - {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = 
"sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, - {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, - {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, - {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, - {file = "watchfiles-0.24.0.tar.gz", hash = 
"sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, + {file = "watchfiles-1.0.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:1d19df28f99d6a81730658fbeb3ade8565ff687f95acb59665f11502b441be5f"}, + {file = "watchfiles-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:28babb38cf2da8e170b706c4b84aa7e4528a6fa4f3ee55d7a0866456a1662041"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12ab123135b2f42517f04e720526d41448667ae8249e651385afb5cda31fedc0"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:13a4f9ee0cd25682679eea5c14fc629e2eaa79aab74d963bc4e21f43b8ea1877"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e1d9284cc84de7855fcf83472e51d32daf6f6cecd094160192628bc3fee1b78"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ee5edc939f53466b329bbf2e58333a5461e6c7b50c980fa6117439e2c18b42d"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5dccfc70480087567720e4e36ec381bba1ed68d7e5f368fe40c93b3b1eba0105"}, + {file = "watchfiles-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c83a6d33a9eda0af6a7470240d1af487807adc269704fe76a4972dd982d16236"}, + {file = "watchfiles-1.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:905f69aad276639eff3893759a07d44ea99560e67a1cf46ff389cd62f88872a2"}, + {file = "watchfiles-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09551237645d6bff3972592f2aa5424df9290e7a2e15d63c5f47c48cde585935"}, + {file = "watchfiles-1.0.0-cp310-none-win32.whl", hash = "sha256:d2b39aa8edd9e5f56f99a2a2740a251dc58515398e9ed5a4b3e5ff2827060755"}, + {file = "watchfiles-1.0.0-cp310-none-win_amd64.whl", hash = "sha256:2de52b499e1ab037f1a87cb8ebcb04a819bf087b1015a4cf6dcf8af3c2a2613e"}, + {file = "watchfiles-1.0.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:fbd0ab7a9943bbddb87cbc2bf2f09317e74c77dc55b1f5657f81d04666c25269"}, + {file = "watchfiles-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:774ef36b16b7198669ce655d4f75b4c3d370e7f1cbdfb997fb10ee98717e2058"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b4fb98100267e6a5ebaff6aaa5d20aea20240584647470be39fe4823012ac96"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0fc3bf0effa2d8075b70badfdd7fb839d7aa9cea650d17886982840d71fdeabf"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:648e2b6db53eca6ef31245805cd528a16f56fa4cc15aeec97795eaf713c11435"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa13d604fcb9417ae5f2e3de676e66aa97427d888e83662ad205bed35a313176"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:936f362e7ff28311b16f0b97ec51e8f2cc451763a3264640c6ed40fb252d1ee4"}, + {file = "watchfiles-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245fab124b9faf58430da547512d91734858df13f2ddd48ecfa5e493455ffccb"}, + {file = "watchfiles-1.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4ff9c7e84e8b644a8f985c42bcc81457240316f900fc72769aaedec9d088055a"}, + {file = "watchfiles-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:9c9a8d8fd97defe935ef8dd53d562e68942ad65067cd1c54d6ed8a088b1d931d"}, + {file = "watchfiles-1.0.0-cp311-none-win32.whl", hash = "sha256:a0abf173975eb9dd17bb14c191ee79999e650997cc644562f91df06060610e62"}, + {file = "watchfiles-1.0.0-cp311-none-win_amd64.whl", hash = "sha256:2a825ba4b32c214e3855b536eb1a1f7b006511d8e64b8215aac06eb680642d84"}, + {file = "watchfiles-1.0.0-cp311-none-win_arm64.whl", hash = "sha256:a5a7a06cfc65e34fd0a765a7623c5ba14707a0870703888e51d3d67107589817"}, + {file = "watchfiles-1.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:28fb64b5843d94e2c2483f7b024a1280662a44409bedee8f2f51439767e2d107"}, + {file = "watchfiles-1.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e3750434c83b61abb3163b49c64b04180b85b4dabb29a294513faec57f2ffdb7"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bedf84835069f51c7b026b3ca04e2e747ea8ed0a77c72006172c72d28c9f69fc"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:90004553be36427c3d06ec75b804233f8f816374165d5225b93abd94ba6e7234"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b46e15c34d4e401e976d6949ad3a74d244600d5c4b88c827a3fdf18691a46359"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:487d15927f1b0bd24e7df921913399bb1ab94424c386bea8b267754d698f8f0e"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ff236d7a3f4b0a42f699a22fc374ba526bc55048a70cbb299661158e1bb5e1f"}, + {file = "watchfiles-1.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c01446626574561756067f00b37e6b09c8622b0fc1e9fdbc7cbcea328d4e514"}, + {file = "watchfiles-1.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b551c465a59596f3d08170bd7e1c532c7260dd90ed8135778038e13c5d48aa81"}, + {file = "watchfiles-1.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e1ed613ee107269f66c2df631ec0fc8efddacface85314d392a4131abe299f00"}, + {file = "watchfiles-1.0.0-cp312-none-win32.whl", hash = "sha256:5f75cd42e7e2254117cf37ff0e68c5b3f36c14543756b2da621408349bd9ca7c"}, + {file = "watchfiles-1.0.0-cp312-none-win_amd64.whl", hash = "sha256:cf517701a4a872417f4e02a136e929537743461f9ec6cdb8184d9a04f4843545"}, + {file = "watchfiles-1.0.0-cp312-none-win_arm64.whl", hash = "sha256:8a2127cd68950787ee36753e6d401c8ea368f73beaeb8e54df5516a06d1ecd82"}, + {file = "watchfiles-1.0.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:95de85c254f7fe8cbdf104731f7f87f7f73ae229493bebca3722583160e6b152"}, + {file = "watchfiles-1.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:533a7cbfe700e09780bb31c06189e39c65f06c7f447326fee707fd02f9a6e945"}, + {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2218e78e2c6c07b1634a550095ac2a429026b2d5cbcd49a594f893f2bb8c936"}, + {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9122b8fdadc5b341315d255ab51d04893f417df4e6c1743b0aac8bf34e96e025"}, + {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9272fdbc0e9870dac3b505bce1466d386b4d8d6d2bacf405e603108d50446940"}, + {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a3b33c3aefe9067ebd87846806cd5fc0b017ab70d628aaff077ab9abf4d06b3"}, + {file = 
"watchfiles-1.0.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc338ce9f8846543d428260fa0f9a716626963148edc937d71055d01d81e1525"}, + {file = "watchfiles-1.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ac778a460ea22d63c7e6fb0bc0f5b16780ff0b128f7f06e57aaec63bd339285"}, + {file = "watchfiles-1.0.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:53ae447f06f8f29f5ab40140f19abdab822387a7c426a369eb42184b021e97eb"}, + {file = "watchfiles-1.0.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1f73c2147a453315d672c1ad907abe6d40324e34a185b51e15624bc793f93cc6"}, + {file = "watchfiles-1.0.0-cp313-none-win32.whl", hash = "sha256:eba98901a2eab909dbd79681190b9049acc650f6111fde1845484a4450761e98"}, + {file = "watchfiles-1.0.0-cp313-none-win_amd64.whl", hash = "sha256:d562a6114ddafb09c33246c6ace7effa71ca4b6a2324a47f4b09b6445ea78941"}, + {file = "watchfiles-1.0.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3d94fd83ed54266d789f287472269c0def9120a2022674990bd24ad989ebd7a0"}, + {file = "watchfiles-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48051d1c504448b2fcda71c5e6e3610ae45de6a0b8f5a43b961f250be4bdf5a8"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29cf884ad4285d23453c702ed03d689f9c0e865e3c85d20846d800d4787de00f"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d3572d4c34c4e9c33d25b3da47d9570d5122f8433b9ac6519dca49c2740d23cd"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c2696611182c85eb0e755b62b456f48debff484b7306b56f05478b843ca8ece"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:550109001920a993a4383b57229c717fa73627d2a4e8fcb7ed33c7f1cddb0c85"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b555a93c15bd2c71081922be746291d776d47521a00703163e5fbe6d2a402399"}, + {file = "watchfiles-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:947ccba18a38b85c366dafeac8df2f6176342d5992ca240a9d62588b214d731f"}, + {file = "watchfiles-1.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ffd98a299b0a74d1b704ef0ed959efb753e656a4e0425c14e46ae4c3cbdd2919"}, + {file = "watchfiles-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f8c4f3a1210ed099a99e6a710df4ff2f8069411059ffe30fa5f9467ebed1256b"}, + {file = "watchfiles-1.0.0-cp39-none-win32.whl", hash = "sha256:1e176b6b4119b3f369b2b4e003d53a226295ee862c0962e3afd5a1c15680b4e3"}, + {file = "watchfiles-1.0.0-cp39-none-win_amd64.whl", hash = "sha256:2d9c0518fabf4a3f373b0a94bb9e4ea7a1df18dec45e26a4d182aa8918dee855"}, + {file = "watchfiles-1.0.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f159ac795785cde4899e0afa539f4c723fb5dd336ce5605bc909d34edd00b79b"}, + {file = "watchfiles-1.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c3d258d78341d5d54c0c804a5b7faa66cd30ba50b2756a7161db07ce15363b8d"}, + {file = "watchfiles-1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5bbd0311588c2de7f9ea5cf3922ccacfd0ec0c1922870a2be503cc7df1ca8be7"}, + {file = "watchfiles-1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9a13ac46b545a7d0d50f7641eefe47d1597e7d1783a5d89e09d080e6dff44b0"}, + {file = "watchfiles-1.0.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:b2bca898c1dc073912d3db7fa6926cc08be9575add9e84872de2c99c688bac4e"}, + {file = "watchfiles-1.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:06d828fe2adc4ac8a64b875ca908b892a3603d596d43e18f7948f3fef5fc671c"}, + {file = "watchfiles-1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:074c7618cd6c807dc4eaa0982b4a9d3f8051cd0b72793511848fd64630174b17"}, + {file = "watchfiles-1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95dc785bc284552d044e561b8f4fe26d01ab5ca40d35852a6572d542adfeb4bc"}, + {file = "watchfiles-1.0.0.tar.gz", hash = "sha256:37566c844c9ce3b5deb964fe1a23378e575e74b114618d211fbda8f59d7b5dab"}, ] [package.dependencies] @@ -3216,81 +3192,76 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "wrapt" -version = "1.16.0" +version = "1.17.0" description = "Module for decorators, wrappers and monkey patching." optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"}, - {file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487"}, - {file = "wrapt-1.16.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0"}, - {file = "wrapt-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136"}, - {file = "wrapt-1.16.0-cp310-cp310-win32.whl", hash = "sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d"}, - {file = "wrapt-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09"}, - {file = "wrapt-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060"}, - {file = "wrapt-1.16.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956"}, - {file = "wrapt-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d"}, - {file = "wrapt-1.16.0-cp311-cp311-win32.whl", hash = "sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362"}, - {file = "wrapt-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b"}, - {file = "wrapt-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809"}, - {file = "wrapt-1.16.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9"}, - {file = "wrapt-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c"}, - {file = "wrapt-1.16.0-cp312-cp312-win32.whl", hash = "sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc"}, - {file = "wrapt-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8"}, - {file = "wrapt-1.16.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c"}, - {file = "wrapt-1.16.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e"}, - {file = "wrapt-1.16.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465"}, - {file = "wrapt-1.16.0-cp36-cp36m-win32.whl", hash = "sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e"}, - {file = "wrapt-1.16.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966"}, - {file = "wrapt-1.16.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5"}, - {file = "wrapt-1.16.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f"}, - {file = "wrapt-1.16.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win32.whl", hash = "sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c"}, - {file = "wrapt-1.16.0-cp37-cp37m-win_amd64.whl", hash = "sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0"}, - {file = "wrapt-1.16.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e"}, - {file = "wrapt-1.16.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca"}, - {file = "wrapt-1.16.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6"}, - {file = "wrapt-1.16.0-cp38-cp38-win32.whl", hash = "sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b"}, - {file = "wrapt-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2"}, - {file = "wrapt-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8"}, - {file = 
"wrapt-1.16.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c"}, - {file = "wrapt-1.16.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f"}, - {file = "wrapt-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537"}, - {file = "wrapt-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3"}, - {file = "wrapt-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35"}, - {file = "wrapt-1.16.0-py3-none-any.whl", hash = "sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1"}, - {file = "wrapt-1.16.0.tar.gz", hash = "sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d"}, + {file = "wrapt-1.17.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a0c23b8319848426f305f9cb0c98a6e32ee68a36264f45948ccf8e7d2b941f8"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ca5f060e205f72bec57faae5bd817a1560fcfc4af03f414b08fa29106b7e2d"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e185ec6060e301a7e5f8461c86fb3640a7beb1a0f0208ffde7a65ec4074931df"}, + {file = "wrapt-1.17.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb90765dd91aed05b53cd7a87bd7f5c188fcd95960914bae0d32c5e7f899719d"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:879591c2b5ab0a7184258274c42a126b74a2c3d5a329df16d69f9cee07bba6ea"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fce6fee67c318fdfb7f285c29a82d84782ae2579c0e1b385b7f36c6e8074fffb"}, + {file = "wrapt-1.17.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0698d3a86f68abc894d537887b9bbf84d29bcfbc759e23f4644be27acf6da301"}, + {file = "wrapt-1.17.0-cp310-cp310-win32.whl", hash = "sha256:69d093792dc34a9c4c8a70e4973a3361c7a7578e9cd86961b2bbf38ca71e4e22"}, + {file = "wrapt-1.17.0-cp310-cp310-win_amd64.whl", hash = "sha256:f28b29dc158ca5d6ac396c8e0a2ef45c4e97bb7e65522bfc04c989e6fe814575"}, + {file = "wrapt-1.17.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74bf625b1b4caaa7bad51d9003f8b07a468a704e0644a700e936c357c17dd45a"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f2a28eb35cf99d5f5bd12f5dd44a0f41d206db226535b37b0c60e9da162c3ed"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81b1289e99cf4bad07c23393ab447e5e96db0ab50974a280f7954b071d41b489"}, + {file = "wrapt-1.17.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f2939cd4a2a52ca32bc0b359015718472d7f6de870760342e7ba295be9ebaf9"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:6a9653131bda68a1f029c52157fd81e11f07d485df55410401f745007bd6d339"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4e4b4385363de9052dac1a67bfb535c376f3d19c238b5f36bddc95efae15e12d"}, + {file = "wrapt-1.17.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bdf62d25234290db1837875d4dceb2151e4ea7f9fff2ed41c0fde23ed542eb5b"}, + {file = "wrapt-1.17.0-cp311-cp311-win32.whl", hash = "sha256:5d8fd17635b262448ab8f99230fe4dac991af1dabdbb92f7a70a6afac8a7e346"}, + {file = "wrapt-1.17.0-cp311-cp311-win_amd64.whl", hash = "sha256:92a3d214d5e53cb1db8b015f30d544bc9d3f7179a05feb8f16df713cecc2620a"}, + {file = "wrapt-1.17.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:89fc28495896097622c3fc238915c79365dd0ede02f9a82ce436b13bd0ab7569"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875d240fdbdbe9e11f9831901fb8719da0bd4e6131f83aa9f69b96d18fae7504"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ed16d95fd142e9c72b6c10b06514ad30e846a0d0917ab406186541fe68b451"}, + {file = "wrapt-1.17.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18b956061b8db634120b58f668592a772e87e2e78bc1f6a906cfcaa0cc7991c1"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:daba396199399ccabafbfc509037ac635a6bc18510ad1add8fd16d4739cdd106"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4d63f4d446e10ad19ed01188d6c1e1bb134cde8c18b0aa2acfd973d41fcc5ada"}, + {file = "wrapt-1.17.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8a5e7cc39a45fc430af1aefc4d77ee6bad72c5bcdb1322cfde852c15192b8bd4"}, + {file = "wrapt-1.17.0-cp312-cp312-win32.whl", hash = "sha256:0a0a1a1ec28b641f2a3a2c35cbe86c00051c04fffcfcc577ffcdd707df3f8635"}, + {file = "wrapt-1.17.0-cp312-cp312-win_amd64.whl", hash = "sha256:3c34f6896a01b84bab196f7119770fd8466c8ae3dfa73c59c0bb281e7b588ce7"}, + {file = "wrapt-1.17.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:714c12485aa52efbc0fc0ade1e9ab3a70343db82627f90f2ecbc898fdf0bb181"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da427d311782324a376cacb47c1a4adc43f99fd9d996ffc1b3e8529c4074d393"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba1739fb38441a27a676f4de4123d3e858e494fac05868b7a281c0a383c098f4"}, + {file = "wrapt-1.17.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e711fc1acc7468463bc084d1b68561e40d1eaa135d8c509a65dd534403d83d7b"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:140ea00c87fafc42739bd74a94a5a9003f8e72c27c47cd4f61d8e05e6dec8721"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:73a96fd11d2b2e77d623a7f26e004cc31f131a365add1ce1ce9a19e55a1eef90"}, + {file = "wrapt-1.17.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0b48554952f0f387984da81ccfa73b62e52817a4386d070c75e4db7d43a28c4a"}, + {file = "wrapt-1.17.0-cp313-cp313-win32.whl", hash = "sha256:498fec8da10e3e62edd1e7368f4b24aa362ac0ad931e678332d1b209aec93045"}, + {file = "wrapt-1.17.0-cp313-cp313-win_amd64.whl", hash = "sha256:fd136bb85f4568fffca995bd3c8d52080b1e5b225dbf1c2b17b66b4c5fa02838"}, + {file = "wrapt-1.17.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = 
"sha256:17fcf043d0b4724858f25b8826c36e08f9fb2e475410bece0ec44a22d533da9b"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a557d97f12813dc5e18dad9fa765ae44ddd56a672bb5de4825527c847d6379"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0229b247b0fc7dee0d36176cbb79dbaf2a9eb7ecc50ec3121f40ef443155fb1d"}, + {file = "wrapt-1.17.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8425cfce27b8b20c9b89d77fb50e368d8306a90bf2b6eef2cdf5cd5083adf83f"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c900108df470060174108012de06d45f514aa4ec21a191e7ab42988ff42a86c"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:4e547b447073fc0dbfcbff15154c1be8823d10dab4ad401bdb1575e3fdedff1b"}, + {file = "wrapt-1.17.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:914f66f3b6fc7b915d46c1cc424bc2441841083de01b90f9e81109c9759e43ab"}, + {file = "wrapt-1.17.0-cp313-cp313t-win32.whl", hash = "sha256:a4192b45dff127c7d69b3bdfb4d3e47b64179a0b9900b6351859f3001397dabf"}, + {file = "wrapt-1.17.0-cp313-cp313t-win_amd64.whl", hash = "sha256:4f643df3d4419ea3f856c5c3f40fec1d65ea2e89ec812c83f7767c8730f9827a"}, + {file = "wrapt-1.17.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:69c40d4655e078ede067a7095544bcec5a963566e17503e75a3a3e0fe2803b13"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f495b6754358979379f84534f8dd7a43ff8cff2558dcdea4a148a6e713a758f"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:baa7ef4e0886a6f482e00d1d5bcd37c201b383f1d314643dfb0367169f94f04c"}, + {file = "wrapt-1.17.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fc931382e56627ec4acb01e09ce66e5c03c384ca52606111cee50d931a342d"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8f8909cdb9f1b237786c09a810e24ee5e15ef17019f7cecb207ce205b9b5fcce"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ad47b095f0bdc5585bced35bd088cbfe4177236c7df9984b3cc46b391cc60627"}, + {file = "wrapt-1.17.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:948a9bd0fb2c5120457b07e59c8d7210cbc8703243225dbd78f4dfc13c8d2d1f"}, + {file = "wrapt-1.17.0-cp38-cp38-win32.whl", hash = "sha256:5ae271862b2142f4bc687bdbfcc942e2473a89999a54231aa1c2c676e28f29ea"}, + {file = "wrapt-1.17.0-cp38-cp38-win_amd64.whl", hash = "sha256:f335579a1b485c834849e9075191c9898e0731af45705c2ebf70e0cd5d58beed"}, + {file = "wrapt-1.17.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d751300b94e35b6016d4b1e7d0e7bbc3b5e1751e2405ef908316c2a9024008a1"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7264cbb4a18dc4acfd73b63e4bcfec9c9802614572025bdd44d0721983fc1d9c"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33539c6f5b96cf0b1105a0ff4cf5db9332e773bb521cc804a90e58dc49b10578"}, + {file = "wrapt-1.17.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c30970bdee1cad6a8da2044febd824ef6dc4cc0b19e39af3085c763fdec7de33"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:bc7f729a72b16ee21795a943f85c6244971724819819a41ddbaeb691b2dd85ad"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6ff02a91c4fc9b6a94e1c9c20f62ea06a7e375f42fe57587f004d1078ac86ca9"}, + {file = "wrapt-1.17.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dfb7cff84e72e7bf975b06b4989477873dcf160b2fd89959c629535df53d4e0"}, + {file = "wrapt-1.17.0-cp39-cp39-win32.whl", hash = "sha256:2399408ac33ffd5b200480ee858baa58d77dd30e0dd0cab6a8a9547135f30a88"}, + {file = "wrapt-1.17.0-cp39-cp39-win_amd64.whl", hash = "sha256:4f763a29ee6a20c529496a20a7bcb16a73de27f5da6a843249c7047daf135977"}, + {file = "wrapt-1.17.0-py3-none-any.whl", hash = "sha256:d2c63b93548eda58abf5188e505ffed0229bf675f7c3090f8e36ad55b8cbc371"}, + {file = "wrapt-1.17.0.tar.gz", hash = "sha256:16187aa2317c731170a88ef35e8937ae0f533c402872c1ee5e6d079fcf320801"}, ] [[package]] diff --git a/src/program/program.py b/src/program/program.py index dfda5cd1..1f3fab7c 100644 --- a/src/program/program.py +++ b/src/program/program.py @@ -254,10 +254,11 @@ def _schedule_functions(self) -> None: } if settings_manager.settings.symlink.repair_symlinks: - scheduled_functions[fix_broken_symlinks] = { - "interval": 60 * 60 * settings_manager.settings.symlink.repair_interval, - "args": [settings_manager.settings.symlink.library_path, settings_manager.settings.symlink.rclone_path] - } + # scheduled_functions[fix_broken_symlinks] = { + # "interval": 60 * 60 * settings_manager.settings.symlink.repair_interval, + # "args": [settings_manager.settings.symlink.library_path, settings_manager.settings.symlink.rclone_path] + # } + logger.warning("Symlink repair is disabled, this will be re-enabled in the future.") for func, config in scheduled_functions.items(): self.scheduler.add_job( diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index 1063e8b6..5f1f57bb 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -11,7 +11,7 @@ DownloadedTorrent, NoMatchingFilesException, NotCachedException ) -from .alldebrid import AllDebridDownloader +# from .alldebrid import AllDebridDownloader from .realdebrid import RealDebridDownloader from .torbox import TorBoxDownloader @@ -24,7 +24,7 @@ def __init__(self): self.services = { RealDebridDownloader: RealDebridDownloader(), TorBoxDownloader: TorBoxDownloader(), - AllDebridDownloader: AllDebridDownloader() + # AllDebridDownloader: AllDebridDownloader() } self.service = next((service for service in self.services.values() if service.initialized), None) self.initialized = self.validate() @@ -39,8 +39,13 @@ def validate(self): def run(self, item: MediaItem): logger.debug(f"Starting download process for {item.log_string} ({item.id})") - download_success = False + if item.state == States.Downloaded: + logger.debug(f"Skipping {item.log_string} ({item.id}) as it has already been downloaded") + yield item + return + + download_success = False for stream in item.streams: container = self.validate_stream(stream, item) if not container: diff --git a/src/program/services/scrapers/shared.py b/src/program/services/scrapers/shared.py index b09b587e..c164ca3f 100644 --- a/src/program/services/scrapers/shared.py +++ b/src/program/services/scrapers/shared.py @@ -18,10 +18,11 @@ Session, ) +bucket_limit = settings_manager.settings.scraping.bucket_limit or 5 enable_aliases = settings_manager.settings.scraping.enable_aliases -settings_model = settings_manager.settings.ranking -ranking_model = 
diff --git a/src/program/settings/manager.py b/src/program/settings/manager.py
index 5b7ffbb9..43b50898 100644
--- a/src/program/settings/manager.py
+++ b/src/program/settings/manager.py
@@ -69,7 +69,8 @@ def load(self, settings_dict: dict | None = None):
                 self.settings = AppModel.model_validate(settings_dict)
                 self.save()
             except ValidationError as e:
-                logger.error(f"Error validating settings: {e}")
+                formatted_error = format_validation_error(e)
+                logger.error(f"Settings validation failed:\n{formatted_error}")
                 raise
             except json.JSONDecodeError as e:
                 logger.error(f"Error parsing settings file: {e}")
@@ -85,4 +86,14 @@ def save(self):
             file.write(self.settings.model_dump_json(indent=4))


+def format_validation_error(e: ValidationError) -> str:
+    """Format validation errors in a user-friendly way"""
+    messages = []
+    for error in e.errors():
+        field = ".".join(str(x) for x in error["loc"])
+        message = error.get("msg")
+        messages.append(f"• {field}: {message}")
+    return "\n".join(messages)
+
+
 settings_manager = SettingsManager()
\ No newline at end of file
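`format_validation_error` above flattens pydantic's nested error list into one bullet per offending field, which is what ends up in the log instead of the raw `ValidationError` dump. A quick standalone check of the output format (the model here is illustrative, mirroring the `bucket_limit` field added below):

```python
from pydantic import BaseModel, Field, ValidationError


class ScraperSettings(BaseModel):
    bucket_limit: int = Field(default=5, ge=0, le=20)


try:
    ScraperSettings(bucket_limit=99)
except ValidationError as e:
    for error in e.errors():
        field = ".".join(str(x) for x in error["loc"])
        print(f"• {field}: {error.get('msg')}")
# Prints something like:
# • bucket_limit: Input should be less than or equal to 20
```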
diff --git a/src/program/settings/models.py b/src/program/settings/models.py
index f9726802..d9f1a73b 100644
--- a/src/program/settings/models.py
+++ b/src/program/settings/models.py
@@ -283,6 +283,7 @@ class ScraperModel(Observable):
     after_10: int = 24
     parse_debug: bool = False
     enable_aliases: bool = True
+    bucket_limit: int = Field(default=5, ge=0, le=20)
     torrentio: TorrentioConfig = TorrentioConfig()
     knightcrawler: KnightcrawlerConfig = KnightcrawlerConfig()
     jackett: JackettConfig = JackettConfig()
diff --git a/src/program/types.py b/src/program/types.py
index d49c179c..15197932 100644
--- a/src/program/types.py
+++ b/src/program/types.py
@@ -11,7 +11,7 @@
     TraktContent,
 )
 from program.services.downloaders import (
-    AllDebridDownloader,
+    # AllDebridDownloader,
     RealDebridDownloader,
     TorBoxDownloader
 )
@@ -38,7 +38,7 @@
 Downloader = Union[
     RealDebridDownloader,
     TorBoxDownloader,
-    AllDebridDownloader
+    # AllDebridDownloader
 ]

 Service = Union[Content, SymlinkLibrary, Scraper, Downloader, Symlinker, Updater]

From d4075359a2f5cdcdf6c82268fbdec79c6c523907 Mon Sep 17 00:00:00 2001
From: Gaisberg
Date: Tue, 26 Nov 2024 10:03:39 +0200
Subject: [PATCH 10/12] fix: episode duplicate downloads

---
 src/program/services/downloaders/__init__.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py
index 5f1f57bb..7152c103 100644
--- a/src/program/services/downloaders/__init__.py
+++ b/src/program/services/downloaders/__init__.py
@@ -40,10 +40,9 @@ def validate(self):
     def run(self, item: MediaItem):
         logger.debug(f"Starting download process for {item.log_string} ({item.id})")
-        if item.state == States.Downloaded:
-            logger.debug(f"Skipping {item.log_string} ({item.id}) as it has already been downloaded")
+        if item.active_stream:
+            logger.debug(f"Skipping {item.log_string} ({item.id}) as it has already been downloaded by another download session")
             yield item
-            return

         download_success = False
         for stream in item.streams:

From 2b80d4712cce499d7c0974e460632f55f406bc31 Mon Sep 17 00:00:00 2001
From: Gaisberg
Date: Tue, 26 Nov 2024 10:32:51 +0200
Subject: [PATCH 11/12] fix: alldebrid after instant endpoint removal

---
 src/program/services/downloaders/__init__.py  |  4 +-
 src/program/services/downloaders/alldebrid.py | 80 ++++++++-----------
 2 files changed, 36 insertions(+), 48 deletions(-)

diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py
index 7152c103..69892fb0 100644
--- a/src/program/services/downloaders/__init__.py
+++ b/src/program/services/downloaders/__init__.py
@@ -11,7 +11,7 @@
     DownloadedTorrent, NoMatchingFilesException, NotCachedException
 )
-# from .alldebrid import AllDebridDownloader
+from .alldebrid import AllDebridDownloader
 from .realdebrid import RealDebridDownloader
 from .torbox import TorBoxDownloader
@@ -24,7 +24,7 @@ def __init__(self):
         self.services = {
             RealDebridDownloader: RealDebridDownloader(),
             TorBoxDownloader: TorBoxDownloader(),
-            # AllDebridDownloader: AllDebridDownloader()
+            AllDebridDownloader: AllDebridDownloader()
         }
         self.service = next((service for service in self.services.values() if service.initialized), None)
         self.initialized = self.validate()
diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py
index 7d8a28b1..35b82a08 100644
--- a/src/program/services/downloaders/alldebrid.py
+++ b/src/program/services/downloaders/alldebrid.py
@@ -1,4 +1,5 @@
 from datetime import datetime
+import time
 from typing import Dict, Iterator, List, Optional, Tuple

 from loguru import logger
@@ -120,54 +121,24 @@ def get_instant_availability(self, infohash: str, item_type: str) -> Optional[To
         Get instant availability for a single infohash
         Required by DownloaderBase
         """
-        if not self.initialized:
-            logger.error("Downloader not properly initialized")
-            return None
+        torrent_id = None
+        return_value = None

         try:
-            params = {"magnets[]": infohash}
-            response = self.api.request_handler.execute(HttpMethod.GET, "magnet/instant", params=params)
-            magnets = response.get("magnets", [])
-
-            if not magnets or not isinstance(magnets[0], dict) or "files" not in magnets[0]:
-                return None
-
-            magnet = magnets[0]
-            files = magnet.get("files", [])
-            valid_files = self._process_files(files)
-
-            if valid_files:
-                return TorrentContainer(infohash=magnet["hash"], files=valid_files)
-
-            return None
-
+            torrent_id = self.add_torrent(infohash)
+            time.sleep(1)
+            info = self.get_torrent_info(torrent_id)
+            if info.status == "Ready":
+                files = self.get_files_and_links(torrent_id)
+                processed_files = [DebridFile.create(filename=file["n"], filesize_bytes=file["s"], filetype=item_type) for file in files]
+                if processed_files:
+                    return_value = TorrentContainer(infohash=infohash, files=processed_files)
         except Exception as e:
             logger.error(f"Failed to get instant availability: {e}")
-            return None
-
-    def _walk_files(self, files: List[dict]) -> Iterator[Tuple[str, int]]:
-        """Walks nested files structure and yields filename, size pairs"""
-        dirs = []
-        for file in files:
-            try:
-                size = int(file.get("s", ""))
-                yield file.get("n", "UNKNOWN"), size
-            except ValueError:
-                dirs.append(file)
-
-        for directory in dirs:
-            yield from self._walk_files(directory.get("e", []))
-
-    def _process_files(self, files: List[dict]) -> List[DebridFile]:
-        """Process and filter valid video files"""
-        result = []
-        for i, (name, size) in enumerate(self._walk_files(files)):
-            if (
-                any(name.lower().endswith(ext) for ext in VIDEO_EXTENSIONS)
-                and "sample" not in name.lower()
-            ):
-                result.append(DebridFile(file_id=i, filename=name, filesize=size))
-        return result
+        finally:
+            if torrent_id:
+                self.delete_torrent(torrent_id)
+            return return_value

     def add_torrent(self, infohash: str) -> str:
         """
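With the `/magnet/instant` endpoint gone, AllDebrid can no longer be asked about cache status directly; the rewrite above probes it by briefly materializing the torrent. Condensed, the flow is the sketch below (method names are the ones from the diff; the one-second sleep and the "Ready" status string follow the patch, not documented AllDebrid guarantees):

```python
import time


def probe_cached(downloader, infohash: str) -> list | None:
    """Add a magnet, check whether AllDebrid reports it Ready, then clean up."""
    torrent_id = None
    try:
        torrent_id = downloader.add_torrent(infohash)  # upload the magnet
        time.sleep(1)                                  # give the resolver a moment
        info = downloader.get_torrent_info(torrent_id)
        if info.status == "Ready":                     # cached on AllDebrid's side
            return downloader.get_files_and_links(torrent_id)
        return None                                    # anything else counts as a cache miss
    finally:
        if torrent_id:
            downloader.delete_torrent(torrent_id)      # never leave probe torrents behind
```

The trade-off is cost: every availability check is now an add, a status poll, a file listing, and a deletion, where the old endpoint answered in a single request.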
@@ -208,7 +179,7 @@ def select_files(self, torrent_id: str, _: List[str] = None) -> None:
             logger.error(f"Failed to select files for torrent {torrent_id}: {e}")
             raise

-    def get_torrent_info(self, torrent_id: str) -> dict:
+    def get_torrent_info(self, torrent_id: str) -> TorrentInfo:
         """
         Get information about a torrent
         Required by DownloaderBase
@@ -227,7 +198,7 @@ def get_torrent_info(self, torrent_id: str) -> TorrentInfo:
                 status=info["status"],
                 bytes=info["size"],
                 created_at=info["uploadDate"],
-                progress=info["size"] / info["downloaded"]
+                progress=(info["size"] / info["downloaded"]) if info["downloaded"] != 0 else 0
             )
         except Exception as e:
             logger.error(f"Failed to get torrent info for {torrent_id}: {e}")
@@ -243,3 +214,20 @@ def delete_torrent(self, torrent_id: str):
         except Exception as e:
             logger.error(f"Failed to delete torrent {torrent_id}: {e}")
             raise
+
+    def get_files_and_links(self, torrent_id: str) -> List[dict]:
+        """
+        Get torrent files and links by id
+        """
+        try:
+            response = self.api.request_handler.execute(
+                HttpMethod.GET,
+                "magnet/files",
+                params={"id[]": torrent_id}
+            )
+            magnet_info = next((info for info in response.get("magnets") if info["id"] == torrent_id), {})
+            return magnet_info.get("files", [])
+
+        except Exception as e:
+            logger.error(f"Failed to get files for {torrent_id}: {e}")
+            raise
\ No newline at end of file
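`get_files_and_links` returns the raw `files` entries from `/magnet/files`. Judging by the `_walk_files` helper this patch deletes, those entries can nest: `"n"` is the name, `"s"` the size, and folders carry children under `"e"`. The new availability code indexes `"n"`/`"s"` directly, which assumes a flat listing; a recursive walk that also tolerates folders could look like this (a sketch reconstructed from the deleted helper, not part of the patch):

```python
from typing import Iterator, Tuple


def walk_files(entries: list[dict]) -> Iterator[Tuple[str, int]]:
    """Yield (filename, size) pairs from AllDebrid's possibly nested file entries."""
    for entry in entries:
        children = entry.get("e")
        if children:  # a folder: recurse into its children
            yield from walk_files(children)
        else:         # a file: "n" is the name, "s" the size in bytes
            yield entry.get("n", "UNKNOWN"), int(entry.get("s", 0))


# list(walk_files([{"n": "Show.S01", "e": [{"n": "S01E01.mkv", "s": 734003200}]}]))
# -> [("S01E01.mkv", 734003200)]
```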
"sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, ] [package.dependencies] diff --git a/src/program/services/downloaders/__init__.py b/src/program/services/downloaders/__init__.py index 69892fb0..7993baac 100644 --- a/src/program/services/downloaders/__init__.py +++ b/src/program/services/downloaders/__init__.py @@ -1,15 +1,21 @@ from typing import List, Optional, Union + from loguru import logger -from program.media.item import MediaItem, Show, Season, Episode, Movie +from program.media.item import Episode, MediaItem, Movie, Season, Show from program.media.state import States from program.media.stream import Stream -from program.settings.manager import settings_manager -from program.services.downloaders.shared import parse_filename from program.services.downloaders.models import ( - DebridFile, ParsedFileData, TorrentContainer, TorrentInfo, - DownloadedTorrent, NoMatchingFilesException, NotCachedException + DebridFile, + DownloadedTorrent, + NoMatchingFilesException, + NotCachedException, + ParsedFileData, + TorrentContainer, + TorrentInfo, ) +from program.services.downloaders.shared import parse_filename +from program.settings.manager import settings_manager from .alldebrid import AllDebridDownloader from .realdebrid import RealDebridDownloader diff --git a/src/program/services/downloaders/alldebrid.py b/src/program/services/downloaders/alldebrid.py index 35b82a08..136b62e2 100644 --- a/src/program/services/downloaders/alldebrid.py +++ b/src/program/services/downloaders/alldebrid.py @@ -1,11 +1,17 @@ -from datetime import datetime import time +from datetime import datetime from typing import Dict, Iterator, List, Optional, Tuple from loguru import logger from requests import Session from requests.exceptions import ConnectTimeout +from program.services.downloaders.models import ( + VIDEO_EXTENSIONS, + DebridFile, + TorrentContainer, + TorrentInfo, +) from program.settings.manager import settings_manager from program.utils.request import ( BaseRequestHandler, @@ -15,7 +21,6 @@ create_service_session, get_rate_limit_params, ) -from program.services.downloaders.models import VIDEO_EXTENSIONS, DebridFile, TorrentContainer, TorrentInfo from .shared import DownloaderBase, premium_days_left diff --git a/src/program/services/downloaders/models.py b/src/program/services/downloaders/models.py index 396203e3..d7703010 100644 --- a/src/program/services/downloaders/models.py +++ b/src/program/services/downloaders/models.py @@ -1,9 +1,10 @@ from datetime import datetime from typing import Dict, List, Literal, Optional, Union + from loguru import logger from pydantic import BaseModel, Field -from program.settings.manager import settings_manager +from program.settings.manager import settings_manager DEFAULT_VIDEO_EXTENSIONS = ["mp4", "mkv", "avi"] ALLOWED_VIDEO_EXTENSIONS = [ diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py index 27ea422b..1472b43d 100644 --- a/src/program/services/downloaders/realdebrid.py +++ b/src/program/services/downloaders/realdebrid.py @@ -1,12 +1,18 @@ +import time from datetime import datetime from enum import Enum -import time -from typing import List, Optional, Union +from typing import List, Optional, Union from loguru import logger from pydantic import BaseModel from requests import Session +from program.services.downloaders.models import ( + VIDEO_EXTENSIONS, + DebridFile, + TorrentContainer, + TorrentInfo, +) from program.settings.manager import settings_manager from program.utils.request 
diff --git a/src/program/services/downloaders/realdebrid.py b/src/program/services/downloaders/realdebrid.py
index 27ea422b..1472b43d 100644
--- a/src/program/services/downloaders/realdebrid.py
+++ b/src/program/services/downloaders/realdebrid.py
@@ -1,12 +1,18 @@
+import time
 from datetime import datetime
 from enum import Enum
-import time
-from typing import List, Optional, Union
+from typing import List, Optional, Union

 from loguru import logger
 from pydantic import BaseModel
 from requests import Session

+from program.services.downloaders.models import (
+    VIDEO_EXTENSIONS,
+    DebridFile,
+    TorrentContainer,
+    TorrentInfo,
+)
 from program.settings.manager import settings_manager
 from program.utils.request import (
     BaseRequestHandler,
@@ -15,10 +21,8 @@
     create_service_session,
     get_rate_limit_params,
 )
-from program.services.downloaders.models import DebridFile, TorrentContainer, TorrentInfo

 from .shared import DownloaderBase, premium_days_left
-from program.services.downloaders.models import VIDEO_EXTENSIONS


 class RDTorrentStatus(str, Enum):
diff --git a/src/program/services/downloaders/shared.py b/src/program/services/downloaders/shared.py
index 309813e8..abb3e60c 100644
--- a/src/program/services/downloaders/shared.py
+++ b/src/program/services/downloaders/shared.py
@@ -1,9 +1,14 @@
 from abc import ABC, abstractmethod
 from datetime import datetime
 from typing import List, Optional
+
 from RTN import ParsedData, parse

-from program.services.downloaders.models import ParsedFileData, TorrentInfo, TorrentContainer
+from program.services.downloaders.models import (
+    ParsedFileData,
+    TorrentContainer,
+    TorrentInfo,
+)


 class DownloaderBase(ABC):
diff --git a/src/program/services/downloaders/torbox.py b/src/program/services/downloaders/torbox.py
index 76881535..2e08b77f 100644
--- a/src/program/services/downloaders/torbox.py
+++ b/src/program/services/downloaders/torbox.py
@@ -1,9 +1,15 @@
 import time
 from datetime import datetime
 from typing import List, Optional, Union
+
 from loguru import logger
 from requests import Session

+from program.services.downloaders.models import (
+    DebridFile,
+    TorrentContainer,
+    TorrentInfo,
+)
 from program.settings.manager import settings_manager
 from program.utils.request import (
     BaseRequestHandler,
@@ -12,11 +18,9 @@
     create_service_session,
     get_rate_limit_params,
 )
-from program.services.downloaders.models import TorrentContainer, DebridFile, TorrentInfo

 from .shared import DownloaderBase, premium_days_left

-
 # class TBTorrentStatus(str, Enum):
 #     """Torbox torrent status enumeration"""
 #     MAGNET_ERROR = "magnet_error"
diff --git a/src/program/services/libraries/symlink.py b/src/program/services/libraries/symlink.py
index 15febd4f..09ac6244 100644
--- a/src/program/services/libraries/symlink.py
+++ b/src/program/services/libraries/symlink.py
@@ -6,8 +6,8 @@
 from typing import TYPE_CHECKING, Generator

 from loguru import logger
-from sqla_wrapper import Session
 from PTT import parse_title
+from sqla_wrapper import Session

 from program.db.db import db
 from program.media.subtitle import Subtitle
diff --git a/src/program/state_transition.py b/src/program/state_transition.py
index 9ecec854..ed318512 100644
--- a/src/program/state_transition.py
+++ b/src/program/state_transition.py
@@ -1,4 +1,5 @@
 from loguru import logger
+
 from program.media import MediaItem, States
 from program.services.downloaders import Downloader
 from program.services.indexers.trakt import TraktIndexer
diff --git a/src/program/types.py b/src/program/types.py
index 15197932..817e9864 100644
--- a/src/program/types.py
+++ b/src/program/types.py
@@ -10,10 +10,9 @@
     PlexWatchlist,
     TraktContent,
 )
-from program.services.downloaders import (
-    # AllDebridDownloader,
+from program.services.downloaders import (  # AllDebridDownloader,
     RealDebridDownloader,
-    TorBoxDownloader
+    TorBoxDownloader,
 )

 # TorBoxDownloader,
diff --git a/src/routers/secure/scrape.py b/src/routers/secure/scrape.py
index 99ab0d46..132ef2fc 100644
--- a/src/routers/secure/scrape.py
+++ b/src/routers/secure/scrape.py
@@ -1,6 +1,6 @@
 import asyncio
-from datetime import datetime, timedelta
 import re
+from datetime import datetime, timedelta
 from typing import Dict, List, Literal, Optional, TypeAlias, Union
 from uuid import uuid4