From fd205d393512200b6547a7f662d2af24618038a7 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 10:38:22 -0500 Subject: [PATCH 001/220] Add multiple key sorting --- tubesync/sync/utils.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index cf72462e..693ac038 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -1,6 +1,7 @@ import os import re import math +from operator import attrgetter from pathlib import Path import requests from PIL import Image @@ -134,6 +135,12 @@ def seconds_to_timestr(seconds): return '{:02d}:{:02d}:{:02d}'.format(hour, minutes, seconds) +def multi_key_sort(objs, specs): + for key, reverse in specs: + sorted(objs, key=attrgetter(key), reverse=reverse) + return objs + + def parse_media_format(format_dict): ''' This parser primarily adapts the format dict returned by youtube-dl into a From 5576449c8cc317f6ebea34f765183986177a1435 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 10:56:19 -0500 Subject: [PATCH 002/220] Use new multiple key sorting --- tubesync/sync/matching.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index c453ff96..5d5b5dbd 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -5,6 +5,7 @@ ''' +from .utils import multi_key_sort from django.conf import settings @@ -49,6 +50,7 @@ def get_best_audio_format(media): ''' # Order all audio-only formats by bitrate audio_formats = [] + sort_keys = [('abr', True)] # key, reverse for fmt in media.iter_formats(): # If the format has a video stream, skip it if fmt['vcodec'] is not None: @@ -56,7 +58,7 @@ def get_best_audio_format(media): if not fmt['acodec']: continue audio_formats.append(fmt) - audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr']))) + audio_formats = list(multi_key_sort(audio_formats, sort_keys)) if not audio_formats: # Media has no audio formats at all 
return False, False @@ -86,6 +88,7 @@ def get_best_video_format(media): return False, False # Filter video-only formats by resolution that matches the source video_formats = [] + sort_keys = [('height', True), ('id', True)] # key, reverse for fmt in media.iter_formats(): # If the format has an audio stream, skip it if fmt['acodec'] is not None: @@ -109,7 +112,7 @@ def get_best_video_format(media): else: # Can't fallback return False, False - video_formats = list(reversed(sorted(video_formats, key=lambda k: k['height']))) + video_formats = list(multi_key_sort(video_formats, sort_keys)) source_resolution = media.source.source_resolution.strip().upper() source_vcodec = media.source.source_vcodec if not video_formats: From 77d72cd08cac770d5a65d552e9286ff0137e12f6 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 12:00:49 -0500 Subject: [PATCH 003/220] Adjust for ci for my fork --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index a2b1225b..785b11d2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -8,6 +8,7 @@ on: push: branches: - main + - 'test-*' jobs: test: @@ -38,7 +39,7 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Log into GitHub Container Registry - run: echo "${{ secrets.REGISTRY_ACCESS_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin + run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin - name: Lowercase github username for ghcr id: string uses: ASzc/change-string-case-action@v1 From 4d5717d157b7d0a1794f455e148f47431dc28527 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 12:03:45 -0500 Subject: [PATCH 004/220] Sort videos by id also (#5) * Add multiple key sorting * Use new multiple key sorting --- tubesync/sync/matching.py | 7 +++++-- tubesync/sync/utils.py | 7 +++++++ 2 files changed, 12 
insertions(+), 2 deletions(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index c453ff96..5d5b5dbd 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -5,6 +5,7 @@ ''' +from .utils import multi_key_sort from django.conf import settings @@ -49,6 +50,7 @@ def get_best_audio_format(media): ''' # Order all audio-only formats by bitrate audio_formats = [] + sort_keys = [('abr', True)] # key, reverse for fmt in media.iter_formats(): # If the format has a video stream, skip it if fmt['vcodec'] is not None: @@ -56,7 +58,7 @@ def get_best_audio_format(media): if not fmt['acodec']: continue audio_formats.append(fmt) - audio_formats = list(reversed(sorted(audio_formats, key=lambda k: k['abr']))) + audio_formats = list(multi_key_sort(audio_formats, sort_keys)) if not audio_formats: # Media has no audio formats at all return False, False @@ -86,6 +88,7 @@ def get_best_video_format(media): return False, False # Filter video-only formats by resolution that matches the source video_formats = [] + sort_keys = [('height', True), ('id', True)] # key, reverse for fmt in media.iter_formats(): # If the format has an audio stream, skip it if fmt['acodec'] is not None: @@ -109,7 +112,7 @@ def get_best_video_format(media): else: # Can't fallback return False, False - video_formats = list(reversed(sorted(video_formats, key=lambda k: k['height']))) + video_formats = list(multi_key_sort(video_formats, sort_keys)) source_resolution = media.source.source_resolution.strip().upper() source_vcodec = media.source.source_vcodec if not video_formats: diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index cf72462e..693ac038 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -1,6 +1,7 @@ import os import re import math +from operator import attrgetter from pathlib import Path import requests from PIL import Image @@ -134,6 +135,12 @@ def seconds_to_timestr(seconds): return '{:02d}:{:02d}:{:02d}'.format(hour, minutes, seconds) 
+def multi_key_sort(objs, specs): + for key, reverse in specs: + sorted(objs, key=attrgetter(key), reverse=reverse) + return objs + + def parse_media_format(format_dict): ''' This parser primarily adapts the format dict returned by youtube-dl into a From 75a56b7410f5f559b0e9a345f845c568c487f598 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 12:27:31 -0500 Subject: [PATCH 005/220] Remind that we are sorting a dict --- tubesync/sync/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 693ac038..18c646c6 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -1,7 +1,7 @@ import os import re import math -from operator import attrgetter +from operator import itemgetter from pathlib import Path import requests from PIL import Image @@ -135,10 +135,10 @@ def seconds_to_timestr(seconds): return '{:02d}:{:02d}:{:02d}'.format(hour, minutes, seconds) -def multi_key_sort(objs, specs): +def multi_key_sort(sort_dict, specs): for key, reverse in specs: - sorted(objs, key=attrgetter(key), reverse=reverse) - return objs + sorted(sort_dict, key=itemgetter(key), reverse=reverse) + return sort_dict def parse_media_format(format_dict): From 650fb88100281eb06ad0f32436892c6af456cf90 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 13:29:58 -0500 Subject: [PATCH 006/220] Add reversed argument for multiple key sorting --- tubesync/sync/utils.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 18c646c6..2597fca3 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -135,10 +135,12 @@ def seconds_to_timestr(seconds): return '{:02d}:{:02d}:{:02d}'.format(hour, minutes, seconds) -def multi_key_sort(sort_dict, specs): - for key, reverse in specs: +def multi_key_sort(sort_dict, specs, reversed=False): + for key, reverse in reversed(specs): sorted(sort_dict, key=itemgetter(key), 
reverse=reverse) - return sort_dict + if reversed: + return list(reversed(sort_dict)) + return list(sort_dict) def parse_media_format(format_dict): From 2d5f27b0a1a083573e813dcdb394fd18a789b218 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 13:33:00 -0500 Subject: [PATCH 007/220] Use reversed argument for multiple key sorting --- tubesync/sync/matching.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index 5d5b5dbd..7fdc303a 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -50,7 +50,7 @@ def get_best_audio_format(media): ''' # Order all audio-only formats by bitrate audio_formats = [] - sort_keys = [('abr', True)] # key, reverse + sort_keys = [('abr', False)] # key, reverse for fmt in media.iter_formats(): # If the format has a video stream, skip it if fmt['vcodec'] is not None: @@ -58,7 +58,7 @@ def get_best_audio_format(media): if not fmt['acodec']: continue audio_formats.append(fmt) - audio_formats = list(multi_key_sort(audio_formats, sort_keys)) + audio_formats = multi_key_sort(audio_formats, sort_keys, True) if not audio_formats: # Media has no audio formats at all return False, False @@ -88,7 +88,7 @@ def get_best_video_format(media): return False, False # Filter video-only formats by resolution that matches the source video_formats = [] - sort_keys = [('height', True), ('id', True)] # key, reverse + sort_keys = [('height', False), ('id', False)] # key, reverse for fmt in media.iter_formats(): # If the format has an audio stream, skip it if fmt['acodec'] is not None: @@ -112,7 +112,7 @@ def get_best_video_format(media): else: # Can't fallback return False, False - video_formats = list(multi_key_sort(video_formats, sort_keys)) + video_formats = multi_key_sort(video_formats, sort_keys, True) source_resolution = media.source.source_resolution.strip().upper() source_vcodec = media.source.source_vcodec if not video_formats: From 
1ba18d34895849a73a8dff06dd587611f2682396 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 13:40:30 -0500 Subject: [PATCH 008/220] Do not shadow a function we are using --- tubesync/sync/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 2597fca3..74e60abc 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -135,10 +135,10 @@ def seconds_to_timestr(seconds): return '{:02d}:{:02d}:{:02d}'.format(hour, minutes, seconds) -def multi_key_sort(sort_dict, specs, reversed=False): +def multi_key_sort(sort_dict, specs, use_reversed=False): for key, reverse in reversed(specs): sorted(sort_dict, key=itemgetter(key), reverse=reverse) - if reversed: + if use_reversed: return list(reversed(sort_dict)) return list(sort_dict) From 01cbb678425872adad26167bf5a9be26e115e690 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 19:28:35 -0500 Subject: [PATCH 009/220] Correct the flow of the sorted lists --- tubesync/sync/utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 74e60abc..966fb0e8 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -136,11 +136,12 @@ def seconds_to_timestr(seconds): def multi_key_sort(sort_dict, specs, use_reversed=False): + result = list(sort_dict) for key, reverse in reversed(specs): - sorted(sort_dict, key=itemgetter(key), reverse=reverse) + result = sorted(result, key=itemgetter(key), reverse=reverse) if use_reversed: - return list(reversed(sort_dict)) - return list(sort_dict) + return list(reversed(result)) + return list(result) def parse_media_format(format_dict): From b4e8651d4b0b49e9e461ce822d7dfd652971b859 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 20:12:30 -0500 Subject: [PATCH 010/220] The result should already be a list --- tubesync/sync/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 966fb0e8..f49c9894 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -141,7 +141,7 @@ def multi_key_sort(sort_dict, specs, use_reversed=False): result = sorted(result, key=itemgetter(key), reverse=reverse) if use_reversed: return list(reversed(result)) - return list(result) + return result def parse_media_format(format_dict): From 58c1c2099b7090e94065209cf716064ea6ebd627 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 2 Dec 2024 23:09:10 -0500 Subject: [PATCH 011/220] Match against height instead of format --- tubesync/sync/matching.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index 7fdc303a..75009dff 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -22,7 +22,7 @@ def get_best_combined_format(media): ''' for fmt in media.iter_formats(): # Check height matches - if media.source.source_resolution.strip().upper() != fmt['format']: + if media.source.source_resolution_height != fmt['height']: continue # Check the video codec matches if media.source.source_vcodec != fmt['vcodec']: @@ -97,6 +97,8 @@ def get_best_video_format(media): continue if media.source.source_resolution.strip().upper() == fmt['format']: video_formats.append(fmt) + elif media.source.source_resolution_height == fmt['height']: + video_formats.append(fmt) # Check we matched some streams if not video_formats: # No streams match the requested resolution, see if we can fallback From 386041bf1278b3e3c7d14322918a5eac0400f7fe Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 16:36:52 -0500 Subject: [PATCH 012/220] Adjust VP09 vcodec to match VP09 source vcodec --- tubesync/sync/utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index f49c9894..3dfcba2d 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -158,6 +158,8 @@ def 
parse_media_format(format_dict): vcodec = None if vcodec == 'NONE': vcodec = None + if vcodec == 'VP09': + vcodec = 'VP9' acodec_full = format_dict.get('acodec', '') acodec_parts = acodec_full.split('.') if len(acodec_parts) > 0: From 87df199ded1c01f85104bdc243dc626e0f73ed57 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 16:42:41 -0500 Subject: [PATCH 013/220] Use the constant from Source --- tubesync/sync/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 3dfcba2d..5fb5ddb5 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -8,6 +8,7 @@ from django.conf import settings from urllib.parse import urlsplit, parse_qs from django.forms import ValidationError +from .models import Source def validate_url(url, validator): @@ -159,7 +160,7 @@ def parse_media_format(format_dict): if vcodec == 'NONE': vcodec = None if vcodec == 'VP09': - vcodec = 'VP9' + vcodec = Source.SOURCE_VCODEC_VP9 acodec_full = format_dict.get('acodec', '') acodec_parts = acodec_full.split('.') if len(acodec_parts) > 0: From 837a39aa6b926014929e663aee536263f684a938 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 16:52:10 -0500 Subject: [PATCH 014/220] Use the audio format list from youtube-dl --- tubesync/sync/matching.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index 75009dff..6a72972d 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -48,9 +48,8 @@ def get_best_audio_format(media): Finds the best match for the source required audio format. If the source has a 'fallback' of fail this can return no match. 
''' - # Order all audio-only formats by bitrate + # Reverse order all audio-only formats audio_formats = [] - sort_keys = [('abr', False)] # key, reverse for fmt in media.iter_formats(): # If the format has a video stream, skip it if fmt['vcodec'] is not None: @@ -58,18 +57,18 @@ def get_best_audio_format(media): if not fmt['acodec']: continue audio_formats.append(fmt) - audio_formats = multi_key_sort(audio_formats, sort_keys, True) + audio_formats = list(reversed(audio_formats)) if not audio_formats: # Media has no audio formats at all return False, False - # Find the highest bitrate audio format with a matching codec + # Find the first audio format with a matching codec for fmt in audio_formats: if media.source.source_acodec == fmt['acodec']: # Matched! return True, fmt['id'] # No codecs matched if media.source.can_fallback: - # Can fallback, find the next highest bitrate non-matching codec + # Can fallback, find the next non-matching codec return False, audio_formats[0]['id'] else: # Can't fallback From b94466fa77a291dc804bca634c49e793b47de5d3 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 17:03:18 -0500 Subject: [PATCH 015/220] Return for empty format lists first --- tubesync/sync/matching.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index 6a72972d..982af040 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -57,10 +57,10 @@ def get_best_audio_format(media): if not fmt['acodec']: continue audio_formats.append(fmt) - audio_formats = list(reversed(audio_formats)) if not audio_formats: # Media has no audio formats at all return False, False + audio_formats = list(reversed(audio_formats)) # Find the first audio format with a matching codec for fmt in audio_formats: if media.source.source_acodec == fmt['acodec']: @@ -113,12 +113,12 @@ def get_best_video_format(media): else: # Can't fallback return False, False - video_formats = 
multi_key_sort(video_formats, sort_keys, True) - source_resolution = media.source.source_resolution.strip().upper() - source_vcodec = media.source.source_vcodec if not video_formats: # Still no matches return False, False + video_formats = multi_key_sort(video_formats, sort_keys, True) + source_resolution = media.source.source_resolution.strip().upper() + source_vcodec = media.source.source_vcodec exact_match, best_match = None, None # Of our filtered video formats, check for resolution + codec + hdr + fps match if media.source.prefer_60fps and media.source.prefer_hdr: From 4fcb1e8da320d729a62187eac18025c7ccd2fc9a Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 17:08:37 -0500 Subject: [PATCH 016/220] Include streams with a blank format_note and the proper height --- tubesync/sync/matching.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/matching.py b/tubesync/sync/matching.py index 982af040..0b1f8d34 100644 --- a/tubesync/sync/matching.py +++ b/tubesync/sync/matching.py @@ -87,7 +87,7 @@ def get_best_video_format(media): return False, False # Filter video-only formats by resolution that matches the source video_formats = [] - sort_keys = [('height', False), ('id', False)] # key, reverse + sort_keys = [('height', False), ('vcodec', True), ('vbr', False)] # key, reverse for fmt in media.iter_formats(): # If the format has an audio stream, skip it if fmt['acodec'] is not None: @@ -120,6 +120,10 @@ def get_best_video_format(media): source_resolution = media.source.source_resolution.strip().upper() source_vcodec = media.source.source_vcodec exact_match, best_match = None, None + for fmt in video_formats: + # format_note was blank, match height instead + if '' == fmt['format'] and fmt['height'] == media.source.source_resolution_height: + fmt['format'] = source_resolution # Of our filtered video formats, check for resolution + codec + hdr + fps match if media.source.prefer_60fps and media.source.prefer_hdr: for fmt in 
video_formats: From c0fa528a1c4f8b1d23336fe528358629eb560946 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 4 Dec 2024 22:24:27 +0000 Subject: [PATCH 017/220] Revert "Use the constant from Source" This reverts commit 87df199ded1c01f85104bdc243dc626e0f73ed57. --- tubesync/sync/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tubesync/sync/utils.py b/tubesync/sync/utils.py index 5fb5ddb5..3dfcba2d 100644 --- a/tubesync/sync/utils.py +++ b/tubesync/sync/utils.py @@ -8,7 +8,6 @@ from django.conf import settings from urllib.parse import urlsplit, parse_qs from django.forms import ValidationError -from .models import Source def validate_url(url, validator): @@ -160,7 +159,7 @@ def parse_media_format(format_dict): if vcodec == 'NONE': vcodec = None if vcodec == 'VP09': - vcodec = Source.SOURCE_VCODEC_VP9 + vcodec = 'VP9' acodec_full = format_dict.get('acodec', '') acodec_parts = acodec_full.split('.') if len(acodec_parts) > 0: From af83c0ab49535b32a5f89135090e460acf20a407 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 9 Dec 2024 18:51:26 -0500 Subject: [PATCH 018/220] Report db.sqlite3 size on dashboard --- tubesync/sync/views.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 94e91432..b43ec241 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -85,6 +85,9 @@ def get_context_data(self, *args, **kwargs): data['config_dir'] = str(settings.CONFIG_BASE_DIR) data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR + if settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): + db_size = Path(settings.DATABASES["default"]["NAME"]).stat().st_size + data['database_connection'] += f' ({db_size} bytes)' return data From 7d7c20a272f1850533c401ed25d742439eb02662 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 9 Dec 2024 22:20:24 -0500 Subject: [PATCH 019/220] fixup: adjust for pathlib import --- tubesync/sync/views.py | 
2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index b43ec241..d0f59e30 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -86,7 +86,7 @@ def get_context_data(self, *args, **kwargs): data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR if settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): - db_size = Path(settings.DATABASES["default"]["NAME"]).stat().st_size + db_size = pathlib.Path(settings.DATABASES["default"]["NAME"]).stat().st_size data['database_connection'] += f' ({db_size} bytes)' return data From e808fe262095b314c45274287387680b94d61419 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 10 Dec 2024 00:40:17 -0500 Subject: [PATCH 020/220] Accommodate test suite The tests are using a memory database. --- tubesync/sync/views.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index d0f59e30..a3b7fa02 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -85,8 +85,11 @@ def get_context_data(self, *args, **kwargs): data['config_dir'] = str(settings.CONFIG_BASE_DIR) data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR - if settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): - db_size = pathlib.Path(settings.DATABASES["default"]["NAME"]).stat().st_size + # Add the database filesize when using db.sqlite3 + db_name = settings.DATABASES["default"]["NAME"] + db_path = pathlib.Path(db_name) if '/' == db_name[0] else None + if db_path and settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): + db_size = db_path.stat().st_size data['database_connection'] += f' ({db_size} bytes)' return data From d0e509f3d86c68f55f0db5e633a080011a8639ed Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 10 Dec 2024 01:07:50 -0500 Subject: [PATCH 021/220] Compare a string to a string --- 
tubesync/sync/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index a3b7fa02..00b8774a 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -86,7 +86,7 @@ def get_context_data(self, *args, **kwargs): data['downloads_dir'] = str(settings.DOWNLOAD_ROOT) data['database_connection'] = settings.DATABASE_CONNECTION_STR # Add the database filesize when using db.sqlite3 - db_name = settings.DATABASES["default"]["NAME"] + db_name = str(settings.DATABASES["default"]["NAME"]) db_path = pathlib.Path(db_name) if '/' == db_name[0] else None if db_path and settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): db_size = db_path.stat().st_size From f1a67d8c82fa46ae3f21d93eb28d872b690bb4dc Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 10 Dec 2024 01:19:53 -0500 Subject: [PATCH 022/220] Group digits of bytes --- tubesync/sync/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/views.py b/tubesync/sync/views.py index 00b8774a..e998d559 100644 --- a/tubesync/sync/views.py +++ b/tubesync/sync/views.py @@ -90,7 +90,7 @@ def get_context_data(self, *args, **kwargs): db_path = pathlib.Path(db_name) if '/' == db_name[0] else None if db_path and settings.DATABASE_CONNECTION_STR.startswith('sqlite at '): db_size = db_path.stat().st_size - data['database_connection'] += f' ({db_size} bytes)' + data['database_connection'] += f' ({db_size:,} bytes)' return data From a11e3487d65e4cff14cb0d462e6015f08c6fe322 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 12:33:14 -0500 Subject: [PATCH 023/220] Add database options where newer Django expects them The `3.2.x` versions don't have a lot of the code that `5.1.x` uses for this. 
--- tubesync/tubesync/local_settings.py.container | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index a0426a4c..c52bf588 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -46,6 +46,13 @@ else: 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': CONFIG_BASE_DIR / 'db.sqlite3', + "OPTIONS": { + "transaction_mode": "IMMEDIATE", + "init_command": """ + PRAGMA auto_vacuum = INCREMENTAL; + PRAGMA incremental_vacuum(100); + """, + }, } } DATABASE_CONNECTION_STR = f'sqlite at "{DATABASES["default"]["NAME"]}"' From 658f690b5b4051530b73f17434b5e6076e14e92c Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 13:33:50 -0500 Subject: [PATCH 024/220] Add tubesync.sqlite3 --- tubesync/tubesync/sqlite3/base.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 tubesync/tubesync/sqlite3/base.py diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py new file mode 100644 index 00000000..1ecb4419 --- /dev/null +++ b/tubesync/tubesync/sqlite3/base.py @@ -0,0 +1,28 @@ +from django.db.backends.sqlite3 import base + + +class DatabaseWrapper(base.DatabaseWrapper): + + def _start_transaction_under_autocommit(self): + conn_params = self.get_connection_params() + if "transaction_mode" not in conn_params: + self.cursor().execute("BEGIN TRANSACTION") + else: + tm = str(conn_params["transaction_mode"]).upper().strip() + transaction_modes = frozenset(["DEFERRED", "EXCLUSIVE", "IMMEDIATE"]) + if tm in transaction_modes: + self.cursor().execute(f"BEGIN {tm} TRANSACTION") + else: + self.cursor().execute("BEGIN TRANSACTION") + + + def init_connection_state(self): + conn_params = self.get_connection_params() + if "init_command" in conn_params: + ic = str(conn_params["init_command"]) + cmds = ic.split(';') + with self.cursor() as cursor: + for init_cmd in 
cmds: + cursor.execute(init_cmd.strip()) + + From 7e872bf8b515297ebd6981556ff768250cf51f17 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 13:35:12 -0500 Subject: [PATCH 025/220] Use tubesync.sqlite3 --- tubesync/tubesync/local_settings.py.container | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index c52bf588..77b531c1 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -44,7 +44,7 @@ if database_dict: else: DATABASES = { 'default': { - 'ENGINE': 'django.db.backends.sqlite3', + 'ENGINE': 'tubesync.sqlite3', 'NAME': CONFIG_BASE_DIR / 'db.sqlite3', "OPTIONS": { "transaction_mode": "IMMEDIATE", From 0ab0605d2297adc2108cd3c00bbcc580a32ef66d Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 13:42:05 -0500 Subject: [PATCH 026/220] Keep legacy_alter_table off Newer Django will set this for us, but it won't matter if it is disabled twice. 
--- tubesync/tubesync/local_settings.py.container | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/tubesync/local_settings.py.container b/tubesync/tubesync/local_settings.py.container index 77b531c1..3659b289 100644 --- a/tubesync/tubesync/local_settings.py.container +++ b/tubesync/tubesync/local_settings.py.container @@ -49,6 +49,7 @@ else: "OPTIONS": { "transaction_mode": "IMMEDIATE", "init_command": """ + PRAGMA legacy_alter_table = OFF; PRAGMA auto_vacuum = INCREMENTAL; PRAGMA incremental_vacuum(100); """, From 467ec947ff023d31d8a78db88c905bf71fc1f3e2 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 15:48:11 -0500 Subject: [PATCH 027/220] Configure isolation_level from transaction_mode TypeError: 'transaction_mode' is an invalid keyword argument for Connection() --- tubesync/tubesync/sqlite3/base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 1ecb4419..46db387a 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -25,4 +25,9 @@ def init_connection_state(self): for init_cmd in cmds: cursor.execute(init_cmd.strip()) + + def get_new_connection(self, conn_params): + conn_params["isolation_level"] = conn_params.pop("transaction_mode", "DEFERRED") + super().get_new_connection(conn_params) + From ebf1ed3ef44855a0b4782b58497b9c7f8df034fd Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 16:04:09 -0500 Subject: [PATCH 028/220] Filter out the init_command key TypeError: 'init_command' is an invalid keyword argument for Connection() --- tubesync/tubesync/sqlite3/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 46db387a..05389730 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -27,7 +27,9 @@ def init_connection_state(self): def get_new_connection(self, conn_params): - 
conn_params["isolation_level"] = conn_params.pop("transaction_mode", "DEFERRED") - super().get_new_connection(conn_params) + filtered_params = conn_params.copy() + filtered_params["isolation_level"] = filtered_params.pop("transaction_mode", "DEFERRED") + _ = filtered_params.pop("init_command", None) + super().get_new_connection(filtered_params) From de52e55e34c9fee8da83584e63365011aa79d955 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 11 Dec 2024 16:30:48 -0500 Subject: [PATCH 029/220] Return get_new_connection Oops. It helps to have the resulting connection. --- tubesync/tubesync/sqlite3/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 05389730..d09d68cd 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -30,6 +30,6 @@ def get_new_connection(self, conn_params): filtered_params = conn_params.copy() filtered_params["isolation_level"] = filtered_params.pop("transaction_mode", "DEFERRED") _ = filtered_params.pop("init_command", None) - super().get_new_connection(filtered_params) + return super().get_new_connection(filtered_params) From 21e9cbef4b71fd463486096b38b0dc216ce218af Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 09:03:08 -0500 Subject: [PATCH 030/220] Cleanup for _start_transaction_under_autocommit --- tubesync/tubesync/sqlite3/base.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index d09d68cd..28d0ebc7 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -5,15 +5,14 @@ class DatabaseWrapper(base.DatabaseWrapper): def _start_transaction_under_autocommit(self): conn_params = self.get_connection_params() - if "transaction_mode" not in conn_params: - self.cursor().execute("BEGIN TRANSACTION") - else: + transaction_modes = frozenset(["DEFERRED", "EXCLUSIVE", 
"IMMEDIATE"]) + + sql_statement = "BEGIN TRANSACTION" + if "transaction_mode" in conn_params: tm = str(conn_params["transaction_mode"]).upper().strip() - transaction_modes = frozenset(["DEFERRED", "EXCLUSIVE", "IMMEDIATE"]) if tm in transaction_modes: - self.cursor().execute(f"BEGIN {tm} TRANSACTION") - else: - self.cursor().execute("BEGIN TRANSACTION") + sql_statement = f"BEGIN {tm} TRANSACTION" + self.cursor().execute(sql_statement) def init_connection_state(self): From 7b033d6e620c5320c35ee6cff14c851922fc769e Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 09:52:51 -0500 Subject: [PATCH 031/220] Use a filter map to make maintenance easier A possible future improvement would be to define a map of which keys Connection accepts. Right now, this is removing keys after Connection fails because of an unknown key. This could be automated by using try and removing a key each time the exception is caught. --- tubesync/tubesync/sqlite3/base.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 28d0ebc7..b2d10a20 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -26,9 +26,12 @@ def init_connection_state(self): def get_new_connection(self, conn_params): - filtered_params = conn_params.copy() - filtered_params["isolation_level"] = filtered_params.pop("transaction_mode", "DEFERRED") - _ = filtered_params.pop("init_command", None) + filter_map = { + "init_command": None, + "transaction_mode": ("isolation_level", "DEFERRED"), + } + filtered_params = {k: v for (k,v) in conn_params.items() if k not in filter_map} + filtered_params.update({v[0]: conn_params.get(k, v[1]) for (k,v) in filter_map.items() if v is not None}) return super().get_new_connection(filtered_params) From 32dead212689f2127e33b7932f01b21c50bb4d7e Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 10:35:46 -0500 Subject: [PATCH 032/220] Proof of concept 
for automated param removal --- tubesync/tubesync/sqlite3/base.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index b2d10a20..7e9e5f57 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -27,11 +27,24 @@ def init_connection_state(self): def get_new_connection(self, conn_params): filter_map = { - "init_command": None, "transaction_mode": ("isolation_level", "DEFERRED"), } filtered_params = {k: v for (k,v) in conn_params.items() if k not in filter_map} filtered_params.update({v[0]: conn_params.get(k, v[1]) for (k,v) in filter_map.items() if v is not None}) - return super().get_new_connection(filtered_params) + + attempt = 0 + connection = None + tries = len(filtered_params) + while connection is None and attempt < tries: + try: + attempt += 1 + connection = super().get_new_connection(filtered_params) + except TypeError as e: + # remove unaccepted param + print(e, flush=True) + print('Exception args:', flush=True) + print(e.args, flush=True) + del filtered_params["init_command"] + return connection From 41215b9148186882af3e7955983b225ade0c01e1 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 11:14:17 -0500 Subject: [PATCH 033/220] Automated invalid keyword argument removal Less manual maintenance as versions change is a win! 
--- tubesync/tubesync/sqlite3/base.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 7e9e5f57..f59065f4 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -1,3 +1,4 @@ +import re from django.db.backends.sqlite3 import base @@ -30,7 +31,7 @@ def get_new_connection(self, conn_params): "transaction_mode": ("isolation_level", "DEFERRED"), } filtered_params = {k: v for (k,v) in conn_params.items() if k not in filter_map} - filtered_params.update({v[0]: conn_params.get(k, v[1]) for (k,v) in filter_map.items() if v is not None}) + filtered_params.update({v[0]: conn_params.get(k, v[1]) for (k,v) in filter_map.items()}) attempt = 0 connection = None @@ -40,11 +41,12 @@ def get_new_connection(self, conn_params): attempt += 1 connection = super().get_new_connection(filtered_params) except TypeError as e: - # remove unaccepted param - print(e, flush=True) - print('Exception args:', flush=True) - print(e.args, flush=True) - del filtered_params["init_command"] + prog = re.compile("^'(?P[^']+)' is an invalid keyword argument for Connection[()]{2}$") + match = prog.match(e.args[0]) + key = match.group('key') if match else None + if key is None: + raise e + del filtered_params[key] return connection From 3fe7203ed50b2c68d571663a1ea3662e60117266 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 12:13:27 -0500 Subject: [PATCH 034/220] Add and use _remove_invalid_keyword_argument --- tubesync/tubesync/sqlite3/base.py | 30 +++++++++++++++++++++--------- 1 file changed, 21 insertions(+), 9 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index f59065f4..812c5b16 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -25,7 +25,25 @@ def init_connection_state(self): for init_cmd in cmds: cursor.execute(init_cmd.strip()) - + + def 
_remove_invalid_keyword_argument(self, e, filtered_params): + key = None + try: + prog = re.compile(r"^'(?P[^']+)' is an invalid keyword argument for Connection[()]{2}$") + match = prog.match(e.args[0]) + except: + raise + else: + if match: + key = match.group('key') + finally: + # This isn't a TypeError we can handle + if key is None: + raise e + # remove the invalid keyword argument + del filtered_params[key] + + def get_new_connection(self, conn_params): filter_map = { "transaction_mode": ("isolation_level", "DEFERRED"), @@ -37,16 +55,10 @@ def get_new_connection(self, conn_params): connection = None tries = len(filtered_params) while connection is None and attempt < tries: + attempt += 1 try: - attempt += 1 connection = super().get_new_connection(filtered_params) except TypeError as e: - prog = re.compile("^'(?P[^']+)' is an invalid keyword argument for Connection[()]{2}$") - match = prog.match(e.args[0]) - key = match.group('key') if match else None - if key is None: - raise e - del filtered_params[key] + self._remove_invalid_keyword_argument(e, filtered_params) return connection - From 36b395ae30f73db92b4040412ae3ede5d1097af2 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 12:31:30 -0500 Subject: [PATCH 035/220] Don't pass the exception to _remove_invalid_keyword_argument --- tubesync/tubesync/sqlite3/base.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 812c5b16..e4dfbcb8 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -26,22 +26,20 @@ def init_connection_state(self): cursor.execute(init_cmd.strip()) - def _remove_invalid_keyword_argument(self, e, filtered_params): - key = None + def _remove_invalid_keyword_argument(self, params): try: prog = re.compile(r"^'(?P[^']+)' is an invalid keyword argument for Connection[()]{2}$") match = prog.match(e.args[0]) - except: - raise else: if match: 
key = match.group('key') - finally: - # This isn't a TypeError we can handle - if key is None: - raise e - # remove the invalid keyword argument - del filtered_params[key] + try: + # remove the invalid keyword argument + del params[key] + else: + return True + + return False def get_new_connection(self, conn_params): @@ -59,6 +57,8 @@ def get_new_connection(self, conn_params): try: connection = super().get_new_connection(filtered_params) except TypeError as e: - self._remove_invalid_keyword_argument(e, filtered_params) + if not self._remove_invalid_keyword_argument(filtered_params): + # This isn't a TypeError we can handle + raise e return connection From ba0d5ab285793bf943bcd76473e1ce657a93e9ac Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 13:33:17 -0500 Subject: [PATCH 036/220] Regex and syntax tweaks I didn't pay close enough attention to the try grammar. --- tubesync/tubesync/sqlite3/base.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index e4dfbcb8..bee1b3f4 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -28,16 +28,15 @@ def init_connection_state(self): def _remove_invalid_keyword_argument(self, params): try: - prog = re.compile(r"^'(?P[^']+)' is an invalid keyword argument for Connection[()]{2}$") + prog = re.compile(r"^(?P['])(?P[^']+)(?P=quote) is an invalid keyword argument for Connection\(\)$") match = prog.match(e.args[0]) - else: - if match: - key = match.group('key') - try: - # remove the invalid keyword argument - del params[key] - else: - return True + + if match: + key = match.group('key') + try: + # remove the invalid keyword argument + del params[key] + return True return False From 11789df864154d5280e21dde29651f9fc8eeb33d Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 13:59:06 -0500 Subject: [PATCH 037/220] Pass the exception argument Fixed up the try syntax too. 
I really dislike try, but it's almost mandatory in Python. --- tubesync/tubesync/sqlite3/base.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index bee1b3f4..05889640 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -26,18 +26,23 @@ def init_connection_state(self): cursor.execute(init_cmd.strip()) - def _remove_invalid_keyword_argument(self, params): + def _remove_invalid_keyword_argument(self, arg_str, params): try: prog = re.compile(r"^(?P['])(?P[^']+)(?P=quote) is an invalid keyword argument for Connection\(\)$") - match = prog.match(e.args[0]) - - if match: + match = prog.match(arg_str) + if match is None: + return False key = match.group('key') - try: - # remove the invalid keyword argument - del params[key] + + # remove the invalid keyword argument + del params[key] + return True + except: + raise + # It's unlikely that this will ever be reached, however, + # it was left here intentionally, so don't remove it. 
return False @@ -56,8 +61,10 @@ def get_new_connection(self, conn_params): try: connection = super().get_new_connection(filtered_params) except TypeError as e: - if not self._remove_invalid_keyword_argument(filtered_params): + e_arg = str(e.args[0]) + if not self._remove_invalid_keyword_argument(e_arg, filtered_params): # This isn't a TypeError we can handle raise e return connection + From 5ee295db0119498423a2dad04adae778612aec09 Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 12 Dec 2024 14:05:23 -0500 Subject: [PATCH 038/220] Keep the argument extraction in the function --- tubesync/tubesync/sqlite3/base.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tubesync/tubesync/sqlite3/base.py b/tubesync/tubesync/sqlite3/base.py index 05889640..ccb709cb 100644 --- a/tubesync/tubesync/sqlite3/base.py +++ b/tubesync/tubesync/sqlite3/base.py @@ -26,10 +26,10 @@ def init_connection_state(self): cursor.execute(init_cmd.strip()) - def _remove_invalid_keyword_argument(self, arg_str, params): + def _remove_invalid_keyword_argument(self, e_args, params): try: prog = re.compile(r"^(?P['])(?P[^']+)(?P=quote) is an invalid keyword argument for Connection\(\)$") - match = prog.match(arg_str) + match = prog.match(str(e_args[0])) if match is None: return False key = match.group('key') @@ -61,8 +61,7 @@ def get_new_connection(self, conn_params): try: connection = super().get_new_connection(filtered_params) except TypeError as e: - e_arg = str(e.args[0]) - if not self._remove_invalid_keyword_argument(e_arg, filtered_params): + if not self._remove_invalid_keyword_argument(e.args, filtered_params): # This isn't a TypeError we can handle raise e return connection From 9381039b52e4188755c03f03990fca7a4bcc9400 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 21 Dec 2024 23:00:32 -0500 Subject: [PATCH 039/220] Update ci.yaml to not try main --- .github/workflows/ci.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml 
b/.github/workflows/ci.yaml index 785b11d2..662b67b5 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -7,7 +7,6 @@ on: workflow_dispatch: push: branches: - - main - 'test-*' jobs: @@ -40,6 +39,9 @@ jobs: uses: docker/setup-buildx-action@v1 - name: Log into GitHub Container Registry run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin + - name: Find upstream ghcr slug + id: ghcrslug + run: gh api repos/:owner/:repo --jq .parent.full_name - name: Lowercase github username for ghcr id: string uses: ASzc/change-string-case-action@v1 From 525fe0c4c8786e85bf497f4ce916044bb62cf107 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 21 Dec 2024 23:13:05 -0500 Subject: [PATCH 040/220] Use env for docker login --- .github/workflows/ci.yaml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 662b67b5..4a7c9982 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -38,9 +38,15 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Log into GitHub Container Registry - run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin + env: + DOCKER_REGISTRY: https://ghcr.io + DOCKER_REGISTRY_USERNAME: ${{ github.actor }} + DOCKER_REGISTRY_PASSWORD: {{ secrets.GITHUB_TOKEN }} + run: docker login - name: Find upstream ghcr slug id: ghcrslug + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: gh api repos/:owner/:repo --jq .parent.full_name - name: Lowercase github username for ghcr id: string From 8dffc4ac3de568d3e7eca0997bfd756571dd07cc Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 21 Dec 2024 23:32:08 -0500 Subject: [PATCH 041/220] Create GHCR_UPSTREAM_SLUG variable --- .github/workflows/ci.yaml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml 
index 4a7c9982..65ad5b04 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,13 +41,19 @@ jobs: env: DOCKER_REGISTRY: https://ghcr.io DOCKER_REGISTRY_USERNAME: ${{ github.actor }} - DOCKER_REGISTRY_PASSWORD: {{ secrets.GITHUB_TOKEN }} + DOCKER_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} run: docker login - name: Find upstream ghcr slug id: ghcrslug env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: gh api repos/:owner/:repo --jq .parent.full_name + run: | + { + delim='EOF' ; + printf -- 'GHCR_UPSTREAM_SLUG<<"%s"\n' "${delim}" ; + gh api repos/:owner/:repo --jq .parent.full_name ; + printf -- '%s\n' "${delim}" ; + } >> "$GITHUB_ENV" - name: Lowercase github username for ghcr id: string uses: ASzc/change-string-case-action@v1 From e1f123474354e8b446a4f16791b875815d456fd0 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 21 Dec 2024 23:39:58 -0500 Subject: [PATCH 042/220] Checkout for gh api to use --- .github/workflows/ci.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 65ad5b04..c6a06c33 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -43,6 +43,8 @@ jobs: DOCKER_REGISTRY_USERNAME: ${{ github.actor }} DOCKER_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} run: docker login + - name: Checkout + uses: actions/checkout@v4 - name: Find upstream ghcr slug id: ghcrslug env: From 2e7756115158cde728d8b483a890537819ffd760 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 21 Dec 2024 23:43:24 -0500 Subject: [PATCH 043/220] Use a distinct delimeter --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c6a06c33..78d3b793 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -51,8 +51,8 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | { - delim='EOF' ; - printf -- 'GHCR_UPSTREAM_SLUG<<"%s"\n' "${delim}" ; + 
delim='"GHCR_UPSTREAM_SLUG_EOF"' ; + printf -- 'GHCR_UPSTREAM_SLUG<<%s\n' "${delim}" ; gh api repos/:owner/:repo --jq .parent.full_name ; printf -- '%s\n' "${delim}" ; } >> "$GITHUB_ENV" From c512db6f74fa079c84a89a80a713ab53bd517723 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 22 Dec 2024 00:42:46 -0500 Subject: [PATCH 044/220] Remove ASzc/change-string-case-action@v1 --- .github/workflows/ci.yaml | 48 ++++++++++++++++++++++++--------------- 1 file changed, 30 insertions(+), 18 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 78d3b793..0a1ade93 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -33,16 +33,6 @@ jobs: containerise: runs-on: ubuntu-latest steps: - - name: Set up QEMU - uses: docker/setup-qemu-action@v1 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v1 - - name: Log into GitHub Container Registry - env: - DOCKER_REGISTRY: https://ghcr.io - DOCKER_REGISTRY_USERNAME: ${{ github.actor }} - DOCKER_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} - run: docker login - name: Checkout uses: actions/checkout@v4 - name: Find upstream ghcr slug @@ -51,23 +41,45 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | { + var='GHCR_UPSTREAM_SLUG' ; delim='"GHCR_UPSTREAM_SLUG_EOF"' ; - printf -- 'GHCR_UPSTREAM_SLUG<<%s\n' "${delim}" ; + printf -- '%s<<%s\n' "${var}" "${delim}" ; gh api repos/:owner/:repo --jq .parent.full_name ; printf -- '%s\n' "${delim}" ; } >> "$GITHUB_ENV" - - name: Lowercase github username for ghcr - id: string - uses: ASzc/change-string-case-action@v1 - with: - string: ${{ github.actor }} + - name: Upstream registry ref + id: upstream + run: | + user="$(printf -- '%s\n' "${GHCR_UPSTREAM_SLUG}" | cut -d '/' -f 1)" + user_lowercase="$(printf -- '%s\n' "${user}" | awk '{print tolower($0);}')" ; + printf -- 'ref=ghcr.io/%s/%s:latest\n' \ + "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + printf -- 'tag=ghcr.io/%s/%s:latest\n' \ + "${user_lowercase}" 
"${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + - name: Registry ref + id: origin + run: | + user_lowercase="$(printf -- '%s\n' "${GITHUB_ACTOR}" | awk '{print tolower($0);}')" ; + printf -- 'ref=ghcr.io/%s/%s:latest\n' \ + "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + printf -- 'tag=ghcr.io/%s/%s:latest\n' \ + "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + - name: Set up QEMU + uses: docker/setup-qemu-action@v1 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v1 + - name: Log into GitHub Container Registry + env: + DOCKER_REGISTRY_USERNAME: ${{ github.actor }} + DOCKER_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} + run: docker login https://ghcr.io - name: Build and push uses: docker/build-push-action@v2 with: platforms: linux/amd64,linux/arm64 push: true - tags: ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest - cache-from: type=registry,ref=ghcr.io/${{ steps.string.outputs.lowercase }}/${{ env.IMAGE_NAME }}:latest + tags: ${{ steps.origin.outputs.tag }} + cache-from: type=registry,ref=${{ steps.origin.outputs.ref }} cache-to: type=inline build-args: | IMAGE_NAME=${{ env.IMAGE_NAME }} From 4462ce9b2ec8873fb03e7f2a23dcefa43f4a69da Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 22 Dec 2024 01:01:02 -0500 Subject: [PATCH 045/220] docker login is finicky --- .github/workflows/ci.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0a1ade93..c40b95db 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -69,10 +69,8 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Log into GitHub Container Registry - env: - DOCKER_REGISTRY_USERNAME: ${{ github.actor }} - DOCKER_REGISTRY_PASSWORD: ${{ secrets.GITHUB_TOKEN }} - run: docker login https://ghcr.io + # run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin + run: echo 
'${{ secrets.GITHUB_TOKEN }}' | docker login --password-stdin --username '${{ github.actor }}' 'https://ghcr.io' - name: Build and push uses: docker/build-push-action@v2 with: From 64647dc6e144e95abedf1ef2b51fd6279fe7b287 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 22 Dec 2024 01:09:00 -0500 Subject: [PATCH 046/220] Use upstream registry cache too --- .github/workflows/ci.yaml | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c40b95db..33efea83 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,8 +41,8 @@ jobs: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | { - var='GHCR_UPSTREAM_SLUG' ; - delim='"GHCR_UPSTREAM_SLUG_EOF"' ; + var='GH_UPSTREAM_SLUG' ; + delim='"'"${var}"'_EOF"' ; printf -- '%s<<%s\n' "${var}" "${delim}" ; gh api repos/:owner/:repo --jq .parent.full_name ; printf -- '%s\n' "${delim}" ; @@ -50,7 +50,7 @@ jobs: - name: Upstream registry ref id: upstream run: | - user="$(printf -- '%s\n' "${GHCR_UPSTREAM_SLUG}" | cut -d '/' -f 1)" + user="$(printf -- '%s\n' "${GH_UPSTREAM_SLUG}" | cut -d '/' -f 1)" user_lowercase="$(printf -- '%s\n' "${user}" | awk '{print tolower($0);}')" ; printf -- 'ref=ghcr.io/%s/%s:latest\n' \ "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; @@ -69,7 +69,6 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v1 - name: Log into GitHub Container Registry - # run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login https://ghcr.io -u ${{ github.actor }} --password-stdin run: echo '${{ secrets.GITHUB_TOKEN }}' | docker login --password-stdin --username '${{ github.actor }}' 'https://ghcr.io' - name: Build and push uses: docker/build-push-action@v2 @@ -77,7 +76,9 @@ jobs: platforms: linux/amd64,linux/arm64 push: true tags: ${{ steps.origin.outputs.tag }} - cache-from: type=registry,ref=${{ steps.origin.outputs.ref }} + cache-from: | + type=registry,ref=${{ steps.upstream.outputs.ref }} + 
type=registry,ref=${{ steps.origin.outputs.ref }} cache-to: type=inline build-args: | IMAGE_NAME=${{ env.IMAGE_NAME }} From 159ba8745bcd3082a4e14598c42b527fa5874775 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 22 Dec 2024 01:25:13 -0500 Subject: [PATCH 047/220] Remove inaccurate and unneeded step id --- .github/workflows/ci.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 33efea83..4aadfd59 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -35,8 +35,7 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Find upstream ghcr slug - id: ghcrslug + - name: Find upstream GitHub slug env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | From 12e83e25de4135706bb3848908da783348decf7c Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 24 Dec 2024 06:13:20 -0500 Subject: [PATCH 048/220] Add human output for logged messages --- tubesync/sync/tasks.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index 37983932..b84f0d81 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -146,11 +146,11 @@ def cleanup_old_media(): def cleanup_removed_media(source, videos): - media_objects = Media.objects.filter(source=source, downloaded=True) - for item in media_objects: - matching_source_item = [video['id'] for video in videos if video['id'] == item.key] + media_objects = Media.objects.filter(source=source) + for media in media_objects: + matching_source_item = [video['id'] for video in videos if video['id'] == media.key] if not matching_source_item: - log.info(f'{item.title} is no longer in source, removing') + log.info(f'{media.name} is no longer in source, removing') item.delete() @@ -199,7 +199,7 @@ def index_source_task(source_id): # Tack on a cleanup of old media cleanup_old_media() if source.delete_removed_media: - log.info(f'Cleaning up media no longer in 
source {source}') + log.info(f'Cleaning up media no longer in source: {source}') cleanup_removed_media(source, videos) @@ -236,7 +236,7 @@ def download_source_images(source_id): f'source exists with ID: {source_id}') return avatar, banner = source.get_image_url - log.info(f'Thumbnail URL for source with ID: {source_id} ' + log.info(f'Thumbnail URL for source with ID: {source_id} / {source} ' f'Avatar: {avatar} ' f'Banner: {banner}') if banner != None: @@ -269,7 +269,7 @@ def download_source_images(source_id): with open(file_path, 'wb') as f: f.write(django_file.read()) - log.info(f'Thumbnail downloaded for source with ID: {source_id}') + log.info(f'Thumbnail downloaded for source with ID: {source_id} / {source}') @background(schedule=0) @@ -285,7 +285,7 @@ def download_media_metadata(media_id): f'media exists with ID: {media_id}') return if media.manual_skip: - log.info(f'Task for ID: {media_id} skipped, due to task being manually skipped.') + log.info(f'Task for ID: {media_id} / {media} skipped, due to task being manually skipped.') return source = media.source metadata = media.index_metadata() @@ -306,7 +306,7 @@ def download_media_metadata(media_id): # Don't filter media here, the post_save signal will handle that media.save() log.info(f'Saved {len(media.metadata)} bytes of metadata for: ' - f'{source} / {media_id}') + f'{source} / {media}: {media_id}') @background(schedule=0) @@ -359,10 +359,10 @@ def download_media(media_id): return if media.skip: # Media was toggled to be skipped after the task was scheduled - log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but ' + log.warn(f'Download task triggered for media: {media} (UUID: {media.pk}) but ' f'it is now marked to be skipped, not downloading') return - if media.downloaded and media.media_file: + if media.downloaded and media.media_file and media.media_file.name: # Media has been marked as downloaded before the download_media task was fired, # skip it log.warn(f'Download task triggered 
for media: {media} (UUID: {media.pk}) but ' @@ -430,7 +430,7 @@ def download_media(media_id): copyfile(media.thumb.path, media.thumbpath) # If selected, write an NFO file if media.source.write_nfo: - log.info(f'Writing media NFO file to: to: {media.nfopath}') + log.info(f'Writing media NFO file to: {media.nfopath}') write_text_file(media.nfopath, media.nfoxml) # Schedule a task to update media servers for mediaserver in MediaServer.objects.all(): @@ -446,7 +446,7 @@ def download_media(media_id): else: # Expected file doesn't exist on disk err = (f'Failed to download media: {media} (UUID: {media.pk}) to disk, ' - f'expected outfile does not exist: {media.filepath}') + f'expected outfile does not exist: {filepath}') log.error(err) # Raising an error here triggers the task to be re-attempted (or fail) raise DownloadFailedException(err) From 5faa23bff982da09c1c72e929fe52581e7d03c48 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 24 Dec 2024 23:52:54 -0500 Subject: [PATCH 049/220] fixup: missed an item to media change --- tubesync/sync/tasks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/tasks.py b/tubesync/sync/tasks.py index b84f0d81..feb67d01 100644 --- a/tubesync/sync/tasks.py +++ b/tubesync/sync/tasks.py @@ -151,7 +151,7 @@ def cleanup_removed_media(source, videos): matching_source_item = [video['id'] for video in videos if video['id'] == media.key] if not matching_source_item: log.info(f'{media.name} is no longer in source, removing') - item.delete() + media.delete() @background(schedule=0) From 629c08168e23d8eb2a5af05262721b908e4757ec Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 04:28:07 -0500 Subject: [PATCH 050/220] Create a s6-overlay image to copy from This was largely inspired by: @socheatsok78 Our downloaded files are checked where that version doesn't do any verification of the downloads. 
--- Dockerfile | 85 ++++++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 79 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 76bb21b2..ad8d7104 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,83 @@ +ARG S6_VERSION="3.2.0.2" + +ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478" +ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f66f785" +ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" + +ARG ALPINE_VERSION="latest" + +FROM alpine:${ALPINE_VERSION} AS s6-overlay-download +RUN apk add --no-cache curl + +ARG S6_VERSION +ARG S6_OVERLAY_VERSION="v${S6_VERSION}" + +ARG SHA256_S6_AMD64 +ARG SHA256_S6_ARM64 +ARG SHA256_S6_NOARCH + +RUN <| sha256 + diff -us sha256 "${f}.sha256" + sha256sum -c < "${f}.sha256" || exit + sha256sum -c < sha256 || exit + ln -v "${f}" /verified/ || exit + done + unset -v a f url + + mkdir -v /s6-overlay-rootfs + cd /s6-overlay-rootfs + for f in /verified/s6-overlay-*.tar.xz + do + tar -xpf "${f}" || exit + done + unset -v f +EOF + +FROM scratch AS s6-overlay +COPY --from=s6-overlay-download /s6-overlay-rootfs / + FROM debian:bookworm-slim ARG TARGETARCH ARG TARGETPLATFORM -ARG S6_VERSION="3.2.0.2" -ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478" -ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f66f785" -ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" +ARG S6_VERSION +ARG SHA256_S6_AMD64 +ARG SHA256_S6_ARM64 +ARG SHA256_S6_NOARCH ARG FFMPEG_DATE="autobuild-2024-12-24-14-15" ARG FFMPEG_VERSION="N-118163-g954d55c2a4" @@ -26,6 +97,8 @@ ENV DEBIAN_FRONTEND="noninteractive" \ S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" # Install third party software +COPY --from=s6-overlay / / + # Reminder: the SHELL handles all variables RUN decide_arch() { \ case "${TARGETARCH:=amd64}" in \ @@ -100,10 +173,10 @@ RUN 
decide_arch() { \ # Install s6 _file="/tmp/s6-overlay-noarch.tar.xz" && \ download_expected_file s6 noarch "${_file}" && \ - tar -C / -xpf "${_file}" && rm -f "${_file}" && \ + #tar -C / -xpf "${_file}" && rm -f "${_file}" && \ _file="/tmp/s6-overlay-${ARCH}.tar.xz" && \ download_expected_file s6 "${TARGETARCH}" "${_file}" && \ - tar -C / -xpf "${_file}" && rm -f "${_file}" && \ + #tar -C / -xpf "${_file}" && rm -f "${_file}" && \ file -L /command/s6-overlay-suexec && \ # Install ffmpeg _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \ From 02a4e2c2ad2f2c3d8e98c30cf65ec5eedffbbab0 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 04:32:25 -0500 Subject: [PATCH 051/220] fixup: no longer a nested case --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index ad8d7104..16fbcb91 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,7 @@ RUN < Date: Wed, 8 Jan 2025 04:42:19 -0500 Subject: [PATCH 052/220] set -x --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 16fbcb91..e37a05dd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -17,7 +17,7 @@ ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH RUN < Date: Wed, 8 Jan 2025 04:49:21 -0500 Subject: [PATCH 053/220] fixup: adjust for a single argument --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index e37a05dd..273d7707 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,7 +35,7 @@ RUN < Date: Wed, 8 Jan 2025 04:54:27 -0500 Subject: [PATCH 054/220] fixup: download uses TARGETARCH --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index 273d7707..ed6b5f91 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,6 +9,8 @@ ARG ALPINE_VERSION="latest" FROM alpine:${ALPINE_VERSION} AS s6-overlay-download RUN apk add --no-cache curl +ARG TARGETARCH + ARG S6_VERSION ARG S6_OVERLAY_VERSION="v${S6_VERSION}" From bc543191f1a6b168991c90e8b1c454b666ffc0ba Mon 
Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 05:27:35 -0500 Subject: [PATCH 055/220] Add and use decide_arch --- Dockerfile | 29 ++++++++++++++++++----------- 1 file changed, 18 insertions(+), 11 deletions(-) diff --git a/Dockerfile b/Dockerfile index ed6b5f91..3d2e4470 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,17 +9,29 @@ ARG ALPINE_VERSION="latest" FROM alpine:${ALPINE_VERSION} AS s6-overlay-download RUN apk add --no-cache curl -ARG TARGETARCH - ARG S6_VERSION -ARG S6_OVERLAY_VERSION="v${S6_VERSION}" ARG SHA256_S6_AMD64 ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH +ARG TARGETARCH + RUN <| sha256 diff -us sha256 "${f}.sha256" sha256sum -c < "${f}.sha256" || exit From 2a0375dd79f1868554fa735c5a0af6f62a8403f3 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 05:57:48 -0500 Subject: [PATCH 056/220] Remove s6 cases The `COPY` from `s6-overlay` is installing `s6` instead. --- Dockerfile | 37 ++++++++++--------------------------- 1 file changed, 10 insertions(+), 27 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3d2e4470..a04e3d4f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,3 +1,8 @@ +ARG FFMPEG_DATE="autobuild-2024-12-24-14-15" +ARG FFMPEG_VERSION="N-118163-g954d55c2a4" +ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a" +ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0" + ARG S6_VERSION="3.2.0.2" ARG SHA256_S6_AMD64="59289456ab1761e277bd456a95e737c06b03ede99158beb24f12b165a904f478" @@ -84,14 +89,11 @@ ARG TARGETARCH ARG TARGETPLATFORM ARG S6_VERSION -ARG SHA256_S6_AMD64 -ARG SHA256_S6_ARM64 -ARG SHA256_S6_NOARCH -ARG FFMPEG_DATE="autobuild-2024-12-24-14-15" -ARG FFMPEG_VERSION="N-118163-g954d55c2a4" -ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a" -ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0" +ARG FFMPEG_DATE +ARG FFMPEG_VERSION +ARG SHA256_FFMPEG_AMD64 +ARG 
SHA256_FFMPEG_ARM64 ENV S6_VERSION="${S6_VERSION}" \ FFMPEG_DATE="${FFMPEG_DATE}" \ @@ -121,11 +123,6 @@ RUN decide_arch() { \ (amd64) printf -- '%s' "${SHA256_FFMPEG_AMD64}" ;; \ (arm64) printf -- '%s' "${SHA256_FFMPEG_ARM64}" ;; \ esac ;; \ - (s6) case "${2}" in \ - (amd64) printf -- '%s' "${SHA256_S6_AMD64}" ;; \ - (arm64) printf -- '%s' "${SHA256_S6_ARM64}" ;; \ - (noarch) printf -- '%s' "${SHA256_S6_NOARCH}" ;; \ - esac ;; \ esac ; \ } && \ decide_url() { \ @@ -142,14 +139,6 @@ RUN decide_arch() { \ (n*) printf -- '-%s\n' "${FFMPEG_VERSION#n}" | cut -d '-' -f 1,2 ;; \ (*) printf -- '' ;; \ esac)" ;; \ - (s6) printf -- \ - 'https://github.com/just-containers/s6-overlay/releases/download/v%s/s6-overlay-%s.tar.xz' \ - "${S6_VERSION}" \ - "$(case "${2}" in \ - (amd64) printf -- 'x86_64' ;; \ - (arm64) printf -- 'aarch64' ;; \ - (*) printf -- '%s' "${2}" ;; \ - esac)" ;; \ esac ; \ } && \ verify_download() { \ @@ -179,13 +168,7 @@ RUN decide_arch() { \ locale-gen en_US.UTF-8 && \ # Install required distro packages apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \ - # Install s6 - _file="/tmp/s6-overlay-noarch.tar.xz" && \ - download_expected_file s6 noarch "${_file}" && \ - #tar -C / -xpf "${_file}" && rm -f "${_file}" && \ - _file="/tmp/s6-overlay-${ARCH}.tar.xz" && \ - download_expected_file s6 "${TARGETARCH}" "${_file}" && \ - #tar -C / -xpf "${_file}" && rm -f "${_file}" && \ + # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ # Install ffmpeg _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \ From 52fa3365f4315260cb605831440b983e09b7c2b0 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 06:06:14 -0500 Subject: [PATCH 057/220] Pull autobuild out of FFMPEG_DATE --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index a04e3d4f..755805ca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -ARG FFMPEG_DATE="autobuild-2024-12-24-14-15" 
+ARG FFMPEG_DATE="2024-12-24-14-15" ARG FFMPEG_VERSION="N-118163-g954d55c2a4" ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a" ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0" @@ -129,7 +129,7 @@ RUN decide_arch() { \ case "${1}" in \ (ffmpeg) printf -- \ 'https://github.com/yt-dlp/FFmpeg-Builds/releases/download/%s/ffmpeg-%s-linux%s-gpl%s.tar.xz' \ - "${FFMPEG_DATE}" \ + "autobuild-${FFMPEG_DATE}" \ "${FFMPEG_VERSION}" \ "$(case "${2}" in \ (amd64) printf -- '64' ;; \ From 2a6005e4487adb48ebc415ac4c473630aae5d583 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 8 Jan 2025 23:52:06 -0500 Subject: [PATCH 058/220] Bind mount Pipfile --- Dockerfile | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 755805ca..7f279f25 100644 --- a/Dockerfile +++ b/Dockerfile @@ -204,9 +204,6 @@ RUN set -x && \ # Copy over pip.conf to use piwheels COPY pip.conf /etc/pip.conf -# Add Pipfile -COPY Pipfile /app/Pipfile - # Do not include compiled byte-code ENV PIP_NO_COMPILE=1 \ PIP_NO_CACHE_DIR=1 \ @@ -216,7 +213,8 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -RUN set -x && \ +RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ + set -x && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ @@ -239,7 +237,6 @@ RUN set -x && \ cp -at /tmp/ "${HOME}" && \ PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ # Clean up - rm /app/Pipfile && \ pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ From c26972f796f4a6190c7c883a36df3f71c7599ddf Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 01:22:27 -0500 Subject: [PATCH 059/220] Verify and cache downloads with Docker --- Dockerfile | 67 ++++++++++++++++-------------------------------------- 1 file changed, 19 insertions(+), 48 deletions(-) diff --git a/Dockerfile b/Dockerfile 
index 7f279f25..4f3e5fb0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,69 +11,40 @@ ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4f ARG ALPINE_VERSION="latest" -FROM alpine:${ALPINE_VERSION} AS s6-overlay-download -RUN apk add --no-cache curl - +FROM scratch AS s6-overlay-download-v${S6_VERSION} ARG S6_VERSION +ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" + +ADD "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.sha256" /downloaded/ +ADD "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.sha256" /downloaded/ +ADD "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.sha256" /downloaded/ ARG SHA256_S6_AMD64 ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH +ADD "--checksum=sha256:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" /downloaded/ +ADD "--checksum=sha256:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" /downloaded/ +ADD "--checksum=sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" /downloaded/ -ARG TARGETARCH +FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted +ARG S6_VERSION +COPY --link --from="s6-overlay-download-v${S6_VERSION}" /downloaded /downloaded RUN <| sha256 - diff -us sha256 "${f}.sha256" - sha256sum -c < "${f}.sha256" || exit - sha256sum -c < sha256 || exit - ln -v "${f}" /verified/ || exit + sha256sum -c < "${f}" || exit + ln -v "${f%.sha256}" /verified/ || exit done - unset -v a f url + unset -v f mkdir -v /s6-overlay-rootfs cd /s6-overlay-rootfs - for f in /verified/s6-overlay-*.tar.xz + for f in /verified/*.tar* do tar -xpf "${f}" || exit done @@ -81,7 +52,7 @@ RUN < Date: Thu, 9 Jan 2025 01:26:45 -0500 Subject: [PATCH 060/220] Using the version in AS is unsupported --- Dockerfile | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4f3e5fb0..96082596 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ ARG 
SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4f ARG ALPINE_VERSION="latest" -FROM scratch AS s6-overlay-download-v${S6_VERSION} +FROM scratch AS s6-overlay-download ARG S6_VERSION ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" @@ -27,8 +27,7 @@ ADD "--checksum=sha256:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64 ADD "--checksum=sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" /downloaded/ FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted -ARG S6_VERSION -COPY --link --from="s6-overlay-download-v${S6_VERSION}" /downloaded /downloaded +COPY --link --from=s6-overlay-download /downloaded /downloaded RUN < Date: Thu, 9 Jan 2025 01:44:15 -0500 Subject: [PATCH 061/220] Use a variable for the destination directory --- Dockerfile | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 96082596..c28b9cfe 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,22 +12,24 @@ ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4f ARG ALPINE_VERSION="latest" FROM scratch AS s6-overlay-download +ARG DESTDIR="/downloaded" + ARG S6_VERSION ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" -ADD "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.sha256" /downloaded/ -ADD "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.sha256" /downloaded/ -ADD "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.sha256" /downloaded/ +ADD --link "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.sha256" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.sha256" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.sha256" "${DESTDIR}/" ARG SHA256_S6_AMD64 ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH -ADD "--checksum=sha256:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" /downloaded/ -ADD "--checksum=sha256:${SHA256_S6_ARM64}" 
"${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" /downloaded/ -ADD "--checksum=sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" /downloaded/ +ADD --link --checksum="sha256:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" "${DESTDIR}/" +ADD --link --checksum="sha256:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" "${DESTDIR}/" +ADD --link --checksum="sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" "${DESTDIR}/" FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted -COPY --link --from=s6-overlay-download /downloaded /downloaded +COPY --from=s6-overlay-download /downloaded /downloaded RUN < Date: Thu, 9 Jan 2025 02:00:16 -0500 Subject: [PATCH 062/220] Only extract our architecture & noarch --- Dockerfile | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index c28b9cfe..54bf03e9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,8 +31,23 @@ ADD --link --checksum="sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted COPY --from=s6-overlay-download /downloaded /downloaded +ARG TARGETARCH + RUN < Date: Thu, 9 Jan 2025 02:06:24 -0500 Subject: [PATCH 063/220] Show the tar command in the logs --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 54bf03e9..e9e2a666 100644 --- a/Dockerfile +++ b/Dockerfile @@ -60,6 +60,7 @@ RUN < Date: Thu, 9 Jan 2025 02:24:45 -0500 Subject: [PATCH 064/220] Use more variables Store the S6_ARCH so that it runs once outside of the logs. Use a variable to allow changing the checksum algorithm easily. 
--- Dockerfile | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/Dockerfile b/Dockerfile index e9e2a666..729b9a57 100644 --- a/Dockerfile +++ b/Dockerfile @@ -13,20 +13,21 @@ ARG ALPINE_VERSION="latest" FROM scratch AS s6-overlay-download ARG DESTDIR="/downloaded" +ARG CHECKSUM_ALGORITHM="sha256" ARG S6_VERSION ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.sha256" "${DESTDIR}/" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.sha256" "${DESTDIR}/" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.sha256" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" ARG SHA256_S6_AMD64 ARG SHA256_S6_ARM64 ARG SHA256_S6_NOARCH -ADD --link --checksum="sha256:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" "${DESTDIR}/" -ADD --link --checksum="sha256:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" "${DESTDIR}/" -ADD --link --checksum="sha256:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" "${DESTDIR}/" +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" "${DESTDIR}/" +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" "${DESTDIR}/" +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" "${DESTDIR}/" FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted COPY --from=s6-overlay-download /downloaded /downloaded @@ -58,13 +59,14 @@ RUN < Date: Thu, 9 Jan 2025 03:05:09 -0500 Subject: [PATCH 065/220] Rename variables The idea here is that the ADD lines for this stage 
should never need to change. This puts all the changes at the beginning of the stage and helps with the inconveniently named variables for the hashes which contains the algorithm as part of the name. --- Dockerfile | 31 +++++++++++++++++++++---------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/Dockerfile b/Dockerfile index 729b9a57..11b39f9c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,22 +12,33 @@ ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4f ARG ALPINE_VERSION="latest" FROM scratch AS s6-overlay-download +ARG S6_VERSION +ARG SHA256_S6_AMD64 +ARG SHA256_S6_ARM64 +ARG SHA256_S6_NOARCH + ARG DESTDIR="/downloaded" ARG CHECKSUM_ALGORITHM="sha256" -ARG S6_VERSION +ARG S6_HASH_AMD64="${SHA256_S6_AMD64}" +ARG S6_HASH_ARM64="${SHA256_S6_ARM64}" +ARG S6_HASH_NOARCH="${SHA256_S6_NOARCH}" + ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" +ARG S6_PREFIX_FILE="s6-overlay-" +ARG S6_SUFFIX_FILE=".tar.xz" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" -ADD --link "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ARG S6_FILE_AMD64="${S6_PREFIX_FILE}x86_64${S6_SUFFIX_FILE}" +ARG S6_FILE_ARM64="${S6_PREFIX_FILE}aarch64${S6_SUFFIX_FILE}" +ARG S6_FILE_NOARCH="${S6_PREFIX_FILE}noarch${S6_SUFFIX_FILE}" -ARG SHA256_S6_AMD64 -ARG SHA256_S6_ARM64 -ARG SHA256_S6_NOARCH -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}" "${S6_OVERLAY_URL}/s6-overlay-x86_64.tar.xz" "${DESTDIR}/" -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}" "${S6_OVERLAY_URL}/s6-overlay-aarch64.tar.xz" "${DESTDIR}/" -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}" "${S6_OVERLAY_URL}/s6-overlay-noarch.tar.xz" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" 
"${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" +ADD --link "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" + +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" +ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted COPY --from=s6-overlay-download /downloaded /downloaded From a1d13a7a0d33d8e6bb8a1a33a1da6f7ba476de2e Mon Sep 17 00:00:00 2001 From: tcely Date: Thu, 9 Jan 2025 03:18:35 -0500 Subject: [PATCH 066/220] Store the full checksum value This is mostly for locality. Everything mentioning `sha256` is together this way. --- Dockerfile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 11b39f9c..2ea4a759 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,9 +20,9 @@ ARG SHA256_S6_NOARCH ARG DESTDIR="/downloaded" ARG CHECKSUM_ALGORITHM="sha256" -ARG S6_HASH_AMD64="${SHA256_S6_AMD64}" -ARG S6_HASH_ARM64="${SHA256_S6_ARM64}" -ARG S6_HASH_NOARCH="${SHA256_S6_NOARCH}" +ARG S6_CHECKSUM_AMD64="${CHECKSUM_ALGORITHM}:${SHA256_S6_AMD64}" +ARG S6_CHECKSUM_ARM64="${CHECKSUM_ALGORITHM}:${SHA256_S6_ARM64}" +ARG S6_CHECKSUM_NOARCH="${CHECKSUM_ALGORITHM}:${SHA256_S6_NOARCH}" ARG S6_OVERLAY_URL="https://github.com/just-containers/s6-overlay/releases/download/v${S6_VERSION}" ARG S6_PREFIX_FILE="s6-overlay-" @@ -36,9 +36,9 @@ ADD --link "${S6_OVERLAY_URL}/${S6_FILE_AMD64}.${CHECKSUM_ALGORITHM}" "${DESTDIR ADD --link "${S6_OVERLAY_URL}/${S6_FILE_ARM64}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" ADD --link "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}.${CHECKSUM_ALGORITHM}" "${DESTDIR}/" -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_AMD64}" 
"${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" -ADD --link --checksum="${CHECKSUM_ALGORITHM}:${S6_HASH_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" +ADD --link --checksum="${S6_CHECKSUM_AMD64}" "${S6_OVERLAY_URL}/${S6_FILE_AMD64}" "${DESTDIR}/" +ADD --link --checksum="${S6_CHECKSUM_ARM64}" "${S6_OVERLAY_URL}/${S6_FILE_ARM64}" "${DESTDIR}/" +ADD --link --checksum="${S6_CHECKSUM_NOARCH}" "${S6_OVERLAY_URL}/${S6_FILE_NOARCH}" "${DESTDIR}/" FROM alpine:${ALPINE_VERSION} AS s6-overlay-extracted COPY --from=s6-overlay-download /downloaded /downloaded From f450007eb90ffc917e077f141afa4f4af7ac7b0f Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 01:46:03 -0500 Subject: [PATCH 067/220] Update ffmpeg to the first build with checksums.sha256 --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 2ea4a759..975b2a7e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,7 @@ -ARG FFMPEG_DATE="2024-12-24-14-15" -ARG FFMPEG_VERSION="N-118163-g954d55c2a4" -ARG SHA256_FFMPEG_AMD64="798a7e5a0724139e6bb70df8921522b23be27028f9f551dfa83c305ec4ffaf3a" -ARG SHA256_FFMPEG_ARM64="c3e6cc0fec42cc7e3804014fbb02c1384a1a31ef13f6f9a36121f2e1216240c0" +ARG FFMPEG_DATE="2025-01-10-19-43" +ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" +ARG SHA256_FFMPEG_AMD64="40842d6783e942904b0e8b5627e83e5d668c48e91396fe3629f440c75213027b" +ARG SHA256_FFMPEG_ARM64="e022ee91d1146dca07fdc0bda9f35141e72d2f186ac89714ca63ff501b606b9f" ARG S6_VERSION="3.2.0.2" From fe37d1de5de6c5ddd765328b683bae909563909f Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 03:35:41 -0500 Subject: [PATCH 068/220] Download ffmpeg with the sums file --- Dockerfile | 49 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) diff --git a/Dockerfile b/Dockerfile index 975b2a7e..e8f23e36 100644 --- 
a/Dockerfile +++ b/Dockerfile @@ -11,6 +11,55 @@ ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4f ARG ALPINE_VERSION="latest" +FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted +ARG FFMPEG_DATE +ARG FFMPEG_VERSION +ARG SHA256_FFMPEG_AMD64 +ARG SHA256_FFMPEG_ARM64 + +ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" +ARG FFMPEG_PREFIX_FILE="ffmpeg-" +ARG FFMPEG_SUFFIX_FILE=".tar.xz" + +ARG FFMPEG_FILE_SUMS="checksums.sha256" + +ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" /downloaded/ +RUN < /tmp/downloads + unset -v url + aria2c --no-conf=true \ + --dir /downloaded \ + --check-integrity=true \ + --always-resume=false \ + --allow-overwrite=true \ + --max-connection-per-server=4 \ + --lowest-speed-limit='16K' \ + --input-file /tmp/downloads + + ls -alR /downloaded +EOF + FROM scratch AS s6-overlay-download ARG S6_VERSION ARG SHA256_S6_AMD64 From 1fc674c548e860aea4ba7daa843f3d6bc79b605f Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 04:10:30 -0500 Subject: [PATCH 069/220] Use the downloaded files or try curl --- Dockerfile | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index e8f23e36..f1abea5e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -163,6 +163,7 @@ ENV DEBIAN_FRONTEND="noninteractive" \ # Install third party software COPY --link --from=s6-overlay / / +COPY --from=ffmpeg-extracted /downloaded /tmp/ # Reminder: the SHELL handles all variables RUN decide_arch() { \ @@ -211,7 +212,10 @@ RUN decide_arch() { \ printf -- '%s\n' \ "Building for arch: ${2}|${ARCH}, downloading ${arg1} from: ${url}, expecting ${arg1} SHA256: ${expected}" && \ rm -rf "${file}" && \ - curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" && \ + { \ + cp -v -l /tmp/downloaded/${url##*/}" "${file}" || \ + curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" ; \ + } && \ verify_download 
"${expected}" "${file}" ; \ } && \ export ARCH="$(decide_arch)" && \ From cfd909f373a86c29ed3a9b4a54783b68c29a4d40 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 04:13:32 -0500 Subject: [PATCH 070/220] Turn off aria2c summary output loop --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index f1abea5e..fb1732c1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,6 +55,7 @@ RUN < Date: Sat, 11 Jan 2025 04:18:35 -0500 Subject: [PATCH 071/220] fixup: missing quote --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index fb1732c1..29d01202 100644 --- a/Dockerfile +++ b/Dockerfile @@ -214,7 +214,7 @@ RUN decide_arch() { \ "Building for arch: ${2}|${ARCH}, downloading ${arg1} from: ${url}, expecting ${arg1} SHA256: ${expected}" && \ rm -rf "${file}" && \ { \ - cp -v -l /tmp/downloaded/${url##*/}" "${file}" || \ + cp -v -l /tmp/downloaded/"${url##*/}" "${file}" || \ curl --disable --output "${file}" --clobber --location --no-progress-meter --url "${url}" ; \ } && \ verify_download "${expected}" "${file}" ; \ From 296ef4829a50874a6ca5bd86444fafa0cc0b3a47 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 04:23:05 -0500 Subject: [PATCH 072/220] fixup: copy into the expected directory --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 29d01202..c5f14173 100644 --- a/Dockerfile +++ b/Dockerfile @@ -164,7 +164,7 @@ ENV DEBIAN_FRONTEND="noninteractive" \ # Install third party software COPY --link --from=s6-overlay / / -COPY --from=ffmpeg-extracted /downloaded /tmp/ +COPY --from=ffmpeg-extracted /downloaded/ /tmp/downloaded/ # Reminder: the SHELL handles all variables RUN decide_arch() { \ From 1b2f62cde673e17cbaa8f4e83be8481f6bb0f2e3 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 04:27:10 -0500 Subject: [PATCH 073/220] Turn off aria2c console status line --- Dockerfile | 1 + 1 file changed, 1 
insertion(+) diff --git a/Dockerfile b/Dockerfile index c5f14173..bc9ec754 100644 --- a/Dockerfile +++ b/Dockerfile @@ -55,6 +55,7 @@ RUN < Date: Sat, 11 Jan 2025 04:40:41 -0500 Subject: [PATCH 074/220] Download only the version requested --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index bc9ec754..cd2a6380 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ ARG SHA256_FFMPEG_AMD64 ARG SHA256_FFMPEG_ARM64 ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" -ARG FFMPEG_PREFIX_FILE="ffmpeg-" +ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_FILE_SUMS="checksums.sha256" From d6b73e170d1fd426c9dc906a23ea69cd84b52b66 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 05:44:16 -0500 Subject: [PATCH 075/220] Do the extraction for ffmpeg --- Dockerfile | 60 ++++++++++++++++++++++++++++++++++++++++++------------ 1 file changed, 47 insertions(+), 13 deletions(-) diff --git a/Dockerfile b/Dockerfile index cd2a6380..e5fb219c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,31 +16,54 @@ ARG FFMPEG_DATE ARG FFMPEG_VERSION ARG SHA256_FFMPEG_AMD64 ARG SHA256_FFMPEG_ARM64 +ARG CHECKSUM_ALGORITHM="sha256" +ARG FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}" +ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}" ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" ARG FFMPEG_SUFFIX_FILE=".tar.xz" -ARG FFMPEG_FILE_SUMS="checksums.sha256" +ARG FFMPEG_FILE_SUMS="checksums.${CHECKSUM_ALGORITHM}" -ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" /downloaded/ +ARG DESTDIR="/downloaded" +ARG TARGETARCH +ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN < /tmp/downloads unset -v url + aria2c --no-conf=true \ --dir /downloaded \ --check-integrity=true \ --always-resume=false \ --allow-overwrite=true \ - 
--max-connection-per-server=4 \ + --max-connection-per-server=2 \ --lowest-speed-limit='16K' \ --show-console-readout=false \ --summary-interval=0 \ --input-file /tmp/downloads - ls -alR /downloaded + cd "${DESTDIR}" + "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" + + mkdir -v -p "/verified/${TARGETARCH}" + printf -- '%s *%s\n' "$(decide_expected)" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" | "${CHECKSUM_ALGORITHM}sum" --check --strict + ln -v "/verified/${TARGETARCH}/" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" + + mkdir -v /extracted + cd /extracted + tar -xvvp \ + --strip-components=2 \ + --no-anchored \ + -f "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" \ + 'ffmpeg' 'ffprobe' EOF FROM scratch AS s6-overlay-download @@ -165,7 +202,7 @@ ENV DEBIAN_FRONTEND="noninteractive" \ # Install third party software COPY --link --from=s6-overlay / / -COPY --from=ffmpeg-extracted /downloaded/ /tmp/downloaded/ +COPY --link --from=ffmpeg-extracted /extracted/ /usr/local/bin/ # Reminder: the SHELL handles all variables RUN decide_arch() { \ @@ -230,10 +267,7 @@ RUN decide_arch() { \ apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \ # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ - # Install ffmpeg - _file="/tmp/ffmpeg-${ARCH}.tar.xz" && \ - download_expected_file ffmpeg "${TARGETARCH}" "${_file}" && \ - tar -xvvpf "${_file}" --strip-components=2 --no-anchored -C /usr/local/bin/ "ffmpeg" "ffprobe" && rm -f "${_file}" && \ + # Install ffmpeg (using COPY earlier) file /usr/local/bin/ff* && \ # Clean up apt-get -y autoremove --purge curl file binutils xz-utils && \ From a86462989b4b7bf1488c48c829f5a1fe82ff8b72 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 05:53:38 -0500 Subject: [PATCH 076/220] fixup: split sha256 properly --- Dockerfile | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index e5fb219c..535efec1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -34,8 +34,8 @@ RUN < Date: Sat, 11 Jan 2025 05:56:16 -0500 Subject: [PATCH 077/220] fixup: use proper argument order for ln --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 535efec1..9b992b46 100644 --- a/Dockerfile +++ b/Dockerfile @@ -88,7 +88,7 @@ RUN < Date: Sat, 11 Jan 2025 06:08:26 -0500 Subject: [PATCH 078/220] Use GNU tar in a distinct step --- Dockerfile | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9b992b46..240fc8a3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -30,7 +30,7 @@ ARG DESTDIR="/downloaded" ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN <> /tmp/SUMS + "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" mkdir -v -p "/verified/${TARGETARCH}" - printf -- '%s *%s\n' "$(decide_expected)" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" | "${CHECKSUM_ALGORITHM}sum" --check --strict ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" +EOF +RUN < Date: Sat, 11 Jan 2025 06:10:50 -0500 Subject: [PATCH 079/220] xz is needed by tar --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 240fc8a3..ce3ccda1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -94,7 +94,7 @@ EOF RUN < Date: Sat, 11 Jan 2025 06:12:48 -0500 Subject: [PATCH 080/220] Log the files that were extracted --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index ce3ccda1..1a7877ae 100644 --- a/Dockerfile +++ b/Dockerfile @@ -103,6 +103,8 @@ RUN < Date: Sat, 11 Jan 2025 06:21:36 -0500 Subject: [PATCH 081/220] Don't preserve ownership from the builder --- Dockerfile 
| 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 1a7877ae..81438eec 100644 --- a/Dockerfile +++ b/Dockerfile @@ -101,6 +101,7 @@ RUN < Date: Sat, 11 Jan 2025 06:34:30 -0500 Subject: [PATCH 082/220] Split into download, verify, and extract steps --- Dockerfile | 42 ++++++++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/Dockerfile b/Dockerfile index 81438eec..66011e1b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -31,7 +31,7 @@ ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN <> /tmp/SUMS @@ -93,7 +99,7 @@ RUN < Date: Sat, 11 Jan 2025 07:27:45 -0500 Subject: [PATCH 083/220] Clean up unneeded code and output --- Dockerfile | 103 ++++++++++++++++------------------------------------- 1 file changed, 30 insertions(+), 73 deletions(-) diff --git a/Dockerfile b/Dockerfile index 66011e1b..91dc8384 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,7 +1,5 @@ ARG FFMPEG_DATE="2025-01-10-19-43" ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" -ARG SHA256_FFMPEG_AMD64="40842d6783e942904b0e8b5627e83e5d668c48e91396fe3629f440c75213027b" -ARG SHA256_FFMPEG_ARM64="e022ee91d1146dca07fdc0bda9f35141e72d2f186ac89714ca63ff501b606b9f" ARG S6_VERSION="3.2.0.2" @@ -10,21 +8,22 @@ ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f6 ARG SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" ARG ALPINE_VERSION="latest" +ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" +ARG FFMPEG_SUFFIX_FILE=".tar.xz" -FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted +FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE ARG FFMPEG_VERSION +ARG FFMPEG_PREFIX_FILE +ARG FFMPEG_SUFFIX_FILE ARG SHA256_FFMPEG_AMD64 ARG SHA256_FFMPEG_ARM64 ARG CHECKSUM_ALGORITHM="sha256" ARG FFMPEG_CHECKSUM_AMD64="${SHA256_FFMPEG_AMD64}" ARG FFMPEG_CHECKSUM_ARM64="${SHA256_FFMPEG_ARM64}" -ARG 
FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" -ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION%%-*}" -ARG FFMPEG_SUFFIX_FILE=".tar.xz" - ARG FFMPEG_FILE_SUMS="checksums.${CHECKSUM_ALGORITHM}" +ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobuild-${FFMPEG_DATE}" ARG DESTDIR="/downloaded" ARG TARGETARCH @@ -39,7 +38,11 @@ RUN <> /tmp/SUMS - "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS + if [ -n "${FFMPEG_HASH}" ] + then + printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS + "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS || exit + fi "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" mkdir -v -p "/verified/${TARGETARCH}" ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" EOF +FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted +COPY --from=ffmpeg-download /verified /verified + +ARG FFMPEG_PREFIX_FILE +ARG FFMPEG_SUFFIX_FILE +ARG TARGETARCH RUN < /etc/locale.gen && \ @@ -282,7 +239,7 @@ RUN decide_arch() { \ apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \ # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ - # Install ffmpeg (using COPY earlier) + # Installed ffmpeg (using COPY earlier) file /usr/local/bin/ff* && \ # Clean up apt-get -y autoremove --purge curl file binutils xz-utils && \ From 294f5af89d6337afc227199dcd8691cefed32fe2 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 07:41:43 -0500 Subject: [PATCH 084/220] Maybe pre-resolve conflict from main --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 91dc8384..483a47fd 100644 --- a/Dockerfile +++ b/Dockerfile @@ -240,6 +240,7 @@ RUN set -x && \ # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ # Installed ffmpeg 
(using COPY earlier) + /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ # Clean up apt-get -y autoremove --purge curl file binutils xz-utils && \ From f27749e35e6c5733d6b8bfcb18114def8dd1613e Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 08:05:52 -0500 Subject: [PATCH 085/220] Use --link wherever we can --- Dockerfile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index 6f3407d6..86315828 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,7 +27,7 @@ ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobu ARG DESTDIR="/downloaded" ARG TARGETARCH -ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" +ADD --link "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN < Date: Sat, 11 Jan 2025 08:35:51 -0500 Subject: [PATCH 086/220] Reduce layers and name the TubeSync stage --- Dockerfile | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index 86315828..72baf080 100644 --- a/Dockerfile +++ b/Dockerfile @@ -123,9 +123,6 @@ RUN < Date: Sat, 11 Jan 2025 09:26:48 -0500 Subject: [PATCH 087/220] Download only the current architecture --- Dockerfile | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/Dockerfile b/Dockerfile index 72baf080..b660cf41 100644 --- a/Dockerfile +++ b/Dockerfile @@ -47,10 +47,18 @@ RUN < Date: Sat, 11 Jan 2025 09:47:55 -0500 Subject: [PATCH 088/220] Remove link from ADD lines --- Dockerfile | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index b660cf41..4195c12e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,7 +27,7 @@ ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobu ARG DESTDIR="/downloaded" ARG TARGETARCH -ADD --link "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" +ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN < Date: Sat, 11 Jan 2025 10:01:48 -0500 
Subject: [PATCH 089/220] Debugging --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 4195c12e..450481b1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,7 +29,7 @@ ARG DESTDIR="/downloaded" ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN < Date: Sat, 11 Jan 2025 10:15:21 -0500 Subject: [PATCH 090/220] EOF wasn't working --- Dockerfile | 108 ++++++++++++++++++++++++++--------------------------- 1 file changed, 52 insertions(+), 56 deletions(-) diff --git a/Dockerfile b/Dockerfile index 450481b1..cf9a213e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,76 +28,72 @@ ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobu ARG DESTDIR="/downloaded" ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" -RUN < /tmp/downloads - unset -v url - +; \ + printf -- '\n' ; \ + } ; \ +\ + decide_arch() { \ + case "${TARGETARCH}" in \ + (amd64) printf -- 'linux64' ;; \ + (arm64) printf -- 'linuxarm64' ;; \ + esac ; \ + } ; \ +\ + FFMPEG_ARCH="$(decide_arch)" ; \ + for url in $(awk ' \ + $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \ + ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") \ + do \ + url="${FFMPEG_URL}/${url# }" ; \ + printf -- '%s\n' "${url}" ; \ + aria2c_options "${url}" ; \ + printf -- '\n' ; \ + done > /tmp/downloads ; \ + unset -v url ; \ +\ aria2c --no-conf=true \ --dir /downloaded \ --lowest-speed-limit='16K' \ --show-console-readout=false \ --summary-interval=0 \ - --input-file /tmp/downloads - - apk --no-cache --no-progress add cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" - - decide_expected() { + --input-file /tmp/downloads ; \ +\ + apk --no-cache --no-progress add cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \ +\ + decide_expected() { \ case "${TARGETARCH}" in \ (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \ (arm64) printf -- '%s' "${FFMPEG_CHECKSUM_ARM64}" ;; \ - esac - } - - FFMPEG_HASH="$(decide_expected)" - - cd 
"${DESTDIR}" - if [ -n "${FFMPEG_HASH}" ] - then - printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS - "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS || exit - fi - "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" - - mkdir -v -p "/verified/${TARGETARCH}" - ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" - rm -rf "${DESTDIR}" -EOF + esac ; \ + } \ +\ + FFMPEG_HASH="$(decide_expected)" ; \ +\ + cd "${DESTDIR}" ; \ + if [ -n "${FFMPEG_HASH}" ] ; \ + then \ + printf -- '%s *%s\n' "${FFMPEG_HASH}" "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" >> /tmp/SUMS ; \ + "${CHECKSUM_ALGORITHM}sum" --check --strict /tmp/SUMS || exit ; \ + fi ; \ + "${CHECKSUM_ALGORITHM}sum" --check --strict --ignore-missing "${DESTDIR}/${FFMPEG_FILE_SUMS}" ; \ +\ + mkdir -v -p "/verified/${TARGETARCH}" ; \ + ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \ + rm -rf "${DESTDIR}" ; FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted COPY --link --from=ffmpeg-download /verified /verified From d999fc5b375f466ce586aa53fceb586deeaf1e71 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 10:17:36 -0500 Subject: [PATCH 091/220] fixup: missed a needed semicolon --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index cf9a213e..0df2c4f9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -56,7 +56,7 @@ RUN set -eux ; \ FFMPEG_ARCH="$(decide_arch)" ; \ for url in $(awk ' \ $2 ~ /^[*]?'"${FFMPEG_PREFIX_FILE}"'/ && /-'"${FFMPEG_ARCH}"'-/ { $1=""; print; } \ - ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") \ + ' "${DESTDIR}/${FFMPEG_FILE_SUMS}") ; \ do \ url="${FFMPEG_URL}/${url# }" ; \ printf -- '%s\n' "${url}" ; \ From abc17a35d62b2828e7c4d723c059b8130547c578 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 
10:22:41 -0500 Subject: [PATCH 092/220] fixup: missed a needed semicolon --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 0df2c4f9..bfe373b3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -79,7 +79,7 @@ RUN set -eux ; \ (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \ (arm64) printf -- '%s' "${FFMPEG_CHECKSUM_ARM64}" ;; \ esac ; \ - } \ + } ; \ \ FFMPEG_HASH="$(decide_expected)" ; \ \ From 6f0bdd0b74e4a1549bad1ca133d4bc42e525b01b Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 10:31:20 -0500 Subject: [PATCH 093/220] EOF wasn't working --- Dockerfile | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/Dockerfile b/Dockerfile index bfe373b3..b82ce9c0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -101,21 +101,20 @@ COPY --link --from=ffmpeg-download /verified /verified ARG FFMPEG_PREFIX_FILE ARG FFMPEG_SUFFIX_FILE ARG TARGETARCH -RUN < Date: Sat, 11 Jan 2025 11:00:19 -0500 Subject: [PATCH 094/220] EOF wasn't working --- Dockerfile | 78 ++++++++++++++++++++++++++---------------------------- 1 file changed, 38 insertions(+), 40 deletions(-) diff --git a/Dockerfile b/Dockerfile index b82ce9c0..0e670eeb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -114,7 +114,7 @@ RUN set -eu ; \ -f "/verified/${TARGETARCH}"/"${FFMPEG_PREFIX_FILE}"*"${FFMPEG_SUFFIX_FILE}" \ 'ffmpeg' 'ffprobe' ; \ \ - ls -AlR /extracted + ls -AlR /extracted ; FROM scratch AS s6-overlay-download ARG S6_VERSION @@ -150,45 +150,43 @@ COPY --link --from=s6-overlay-download /downloaded /downloaded ARG TARGETARCH -RUN < Date: Sat, 11 Jan 2025 11:03:51 -0500 Subject: [PATCH 095/220] Revert Debugging --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 0e670eeb..f5c2460e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -28,7 +28,7 @@ ARG FFMPEG_URL="https://github.com/yt-dlp/FFmpeg-Builds/releases/download/autobu ARG DESTDIR="/downloaded" ARG TARGETARCH ADD 
"${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" -RUN set -eux ; \ +RUN set -eu ; \ apk --no-cache --no-progress add cmd:aria2c cmd:awk ; \ \ aria2c_options() { \ From 99413fae2899bc18274a1c26d3ee086bbb6a1426 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 11:09:48 -0500 Subject: [PATCH 096/220] Legacy build wasn't happy about --mount --- Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index f5c2460e..5797dd02 100644 --- a/Dockerfile +++ b/Dockerfile @@ -267,7 +267,9 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ +#BuildKit#RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ +COPY Pipfile /app/Pipfile +RUN \ set -x && \ apt-get update && \ # Install required build packages From 5c8c9cd9b820dfbc46b896f58e165a2b848b34f3 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 11 Jan 2025 11:27:47 -0500 Subject: [PATCH 097/220] Remove copied Pipfile again --- Dockerfile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 5797dd02..a69609c5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -258,6 +258,9 @@ RUN set -x && \ # Copy over pip.conf to use piwheels COPY pip.conf /etc/pip.conf +# Add Pipfile +COPY Pipfile /app/Pipfile + # Do not include compiled byte-code ENV PIP_NO_COMPILE=1 \ PIP_NO_CACHE_DIR=1 \ @@ -268,7 +271,6 @@ WORKDIR /app # Set up the app #BuildKit#RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ -COPY Pipfile /app/Pipfile RUN \ set -x && \ apt-get update && \ @@ -293,6 +295,7 @@ RUN \ cp -at /tmp/ "${HOME}" && \ PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ # Clean up + rm /app/Pipfile && \ pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ From c255ec487ab2cec01f33339bc09ceddd33888471 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 12 Jan 2025 05:23:45 -0500 Subject: [PATCH 098/220] Revert 
"Dockerfile syntax and checks" --- Dockerfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index fc0150b0..d0107385 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,6 @@ # syntax=docker/dockerfile:1 # check=error=true -FROM debian:bookworm-slim - ARG FFMPEG_DATE="2025-01-10-19-43" ARG FFMPEG_VERSION="N-118280-g5cd49e1bfd" @@ -377,7 +375,7 @@ RUN set -x && \ COPY config/root / # Create a healthcheck -HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] +HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1:8080/healthcheck # ENVS and ports ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache" From 244d9b8cdac9a65c8472a487431215b08950a551 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 12 Jan 2025 05:32:37 -0500 Subject: [PATCH 099/220] Redo reverted merge of patch-4 Without the extra FROM line at the top this time. --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index d0107385..554e0aaf 100644 --- a/Dockerfile +++ b/Dockerfile @@ -375,7 +375,7 @@ RUN set -x && \ COPY config/root / # Create a healthcheck -HEALTHCHECK --interval=1m --timeout=10s CMD /app/healthcheck.py http://127.0.0.1:8080/healthcheck +HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] # ENVS and ports ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache" From 059fda844828c32071f03852da2207dbaf7700e7 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 00:17:44 -0500 Subject: [PATCH 100/220] Run apk once This was leftover from when the RUN was split apart in development. 
--- Dockerfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 554e0aaf..1b78c486 100644 --- a/Dockerfile +++ b/Dockerfile @@ -36,7 +36,7 @@ ARG DESTDIR="/downloaded" ARG TARGETARCH ADD "${FFMPEG_URL}/${FFMPEG_FILE_SUMS}" "${DESTDIR}/" RUN set -eu ; \ - apk --no-cache --no-progress add cmd:aria2c cmd:awk ; \ + apk --no-cache --no-progress add cmd:aria2c cmd:awk "cmd:${CHECKSUM_ALGORITHM}sum" ; \ \ aria2c_options() { \ algorithm="${CHECKSUM_ALGORITHM%[0-9]??}" ; \ @@ -80,8 +80,6 @@ RUN set -eu ; \ --summary-interval=0 \ --input-file /tmp/downloads ; \ \ - apk --no-cache --no-progress add "cmd:${CHECKSUM_ALGORITHM}sum" ; \ -\ decide_expected() { \ case "${TARGETARCH}" in \ (amd64) printf -- '%s' "${FFMPEG_CHECKSUM_AMD64}" ;; \ From e08a2ba5fe796bec3270a2c020400b324b42fac4 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 00:20:12 -0500 Subject: [PATCH 101/220] Show resulting files --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 1b78c486..39c9f6b6 100644 --- a/Dockerfile +++ b/Dockerfile @@ -99,7 +99,7 @@ RUN set -eu ; \ \ mkdir -v -p "/verified/${TARGETARCH}" ; \ ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \ - rm -rf "${DESTDIR}" ; + rm -rf "${DESTDIR}" ; ls -alR /*ed ; FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted COPY --from=ffmpeg-download /verified /verified From e55ff80244b71c143bd8da64147c3ad45e60e328 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 01:33:08 -0500 Subject: [PATCH 102/220] Cache apt-get update --- Dockerfile | 38 +++++++++++++++++++++++++++----------- 1 file changed, 27 insertions(+), 11 deletions(-) diff --git a/Dockerfile b/Dockerfile index 39c9f6b6..459f292e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,6 +11,8 @@ ARG SHA256_S6_ARM64="8b22a2eaca4bf0b27a43d36e65c89d2701738f628d1abd0cea5569619f6 ARG 
SHA256_S6_NOARCH="6dbcde158a3e78b9bb141d7bcb5ccb421e563523babbe2c64470e76f4fd02dae" ARG ALPINE_VERSION="latest" +ARG DEBIAN_VERSION="bookworm-slim" + ARG FFMPEG_PREFIX_FILE="ffmpeg-${FFMPEG_VERSION}" ARG FFMPEG_SUFFIX_FILE=".tar.xz" @@ -215,7 +217,22 @@ RUN set -eu ; \ FROM scratch AS s6-overlay COPY --from=s6-overlay-extracted /s6-overlay-rootfs / -FROM debian:bookworm-slim AS tubesync +FROM debian:${DEBIAN_VERSION} AS cache-apt +RUN \ + set -eu ; \ + DEBIAN_FRONTEND="noninteractive" apt-get update ; \ + cache_dir='/cache/apt' ; \ + mkdir -v -p "${cache_dir}" ; \ + tar -C /var/cache -cf "${cache_dir}"/cache.tar apt ; \ + tar -C /var/lib -cf "${cache_dir}"/lib.tar apt ; \ + file="${cache_dir}/apt.sh" ; \ + printf -- '#!/bin/sh\n\n' >| "${file}" ; \ + chmod -v a+rx "${file}" ; \ + printf -- '%s\n' >> "${file}" \ + 'tar -C /var/cache -xpf "${cache_dir}"/cache.tar ;' \ + 'tar -C /var/lib -xpf "${cache_dir}"/lib.tar ;' + +FROM debian:${DEBIAN_VERSION} AS tubesync ARG TARGETARCH ARG TARGETPLATFORM @@ -242,8 +259,9 @@ COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN set -x && \ - apt-get update && \ +RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ + set -x && \ + { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ locale-gen en_US.UTF-8 && \ @@ -261,8 +279,9 @@ RUN set -x && \ rm -rf /tmp/* # Install dependencies we keep -RUN set -x && \ - apt-get update && \ +RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ + set -x && \ + { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ # Install required distro packages apt-get -y --no-install-recommends install \ libjpeg62-turbo \ @@ -285,9 +304,6 @@ RUN set -x && \ # Copy over pip.conf to use piwheels COPY pip.conf /etc/pip.conf -# Add Pipfile -COPY 
Pipfile /app/Pipfile - # Do not include compiled byte-code ENV PIP_NO_COMPILE=1 \ PIP_NO_CACHE_DIR=1 \ @@ -297,10 +313,10 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -#BuildKit#RUN --mount=type=bind,source=Pipfile,target=/app/Pipfile \ -RUN \ +RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ + --mount=type=bind,source=Pipfile,target=/app/Pipfile \ set -x && \ - apt-get update && \ + { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ # Install required build packages apt-get -y --no-install-recommends install \ default-libmysqlclient-dev \ From edb086b1b22de0ebd838077ffa3bf24a8b2e3f21 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 01:47:15 -0500 Subject: [PATCH 103/220] fixup: quoting --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 459f292e..3e3eb82f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -229,8 +229,8 @@ RUN \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ printf -- '%s\n' >> "${file}" \ - 'tar -C /var/cache -xpf "${cache_dir}"/cache.tar ;' \ - 'tar -C /var/lib -xpf "${cache_dir}"/lib.tar ;' + 'tar -C /var/cache -xpf '"${cache_dir}"'/cache.tar ;' \ + 'tar -C /var/lib -xpf '"${cache_dir}"'/lib.tar ;' FROM debian:${DEBIAN_VERSION} AS tubesync From f518850554f034ea618151b6b31ac726e0b29612 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 01:51:40 -0500 Subject: [PATCH 104/220] Use relative paths --- Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3e3eb82f..297c58e2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -229,8 +229,9 @@ RUN \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ printf -- '%s\n' >> "${file}" \ - 'tar -C /var/cache -xpf '"${cache_dir}"'/cache.tar ;' \ - 'tar -C /var/lib -xpf '"${cache_dir}"'/lib.tar ;' + 'cd "$(dirname "$0")" ;' \ + 'tar -C /var/cache -xpf cache.tar ;' \ + 'tar -C /var/lib 
-xpf lib.tar ;' FROM debian:${DEBIAN_VERSION} AS tubesync From 1a139052cc10aa850f76975dd9d8e3d4a8ca50eb Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 03:13:00 -0500 Subject: [PATCH 105/220] Remove an unhelpful apt config file --- Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/Dockerfile b/Dockerfile index 297c58e2..ce9e5ea2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -220,6 +220,7 @@ COPY --from=s6-overlay-extracted /s6-overlay-rootfs / FROM debian:${DEBIAN_VERSION} AS cache-apt RUN \ set -eu ; \ + rm -f /etc/apt/apt.conf.d/docker-clean ; \ DEBIAN_FRONTEND="noninteractive" apt-get update ; \ cache_dir='/cache/apt' ; \ mkdir -v -p "${cache_dir}" ; \ From 153fffec05ec171edccd9a5b9b0a68f832d6a221 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 04:12:51 -0500 Subject: [PATCH 106/220] Cache most Debian packages --- Dockerfile | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/Dockerfile b/Dockerfile index ce9e5ea2..86d9df94 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,6 +19,8 @@ ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" +ARG DEBIAN_PKGS="bin-utils ca-certificates curl file gcc g++ less locales make nginx-light pipenv python3-dev xz-utils" + FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE ARG FFMPEG_VERSION @@ -218,21 +220,25 @@ FROM scratch AS s6-overlay COPY --from=s6-overlay-extracted /s6-overlay-rootfs / FROM debian:${DEBIAN_VERSION} AS cache-apt +ARG DEBIAN_PKGS RUN \ set -eu ; \ rm -f /etc/apt/apt.conf.d/docker-clean ; \ - DEBIAN_FRONTEND="noninteractive" apt-get update ; \ + DEBIAN_FRONTEND="noninteractive"; export DEBIAN_FRONTEND ; \ + apt-get --quiet update ; \ + apt-get --quiet --assume-yes install --download-only --no-install-recommends \ + ${DEBIAN_PKGS} ; \ cache_dir='/cache/apt' ; \ mkdir -v -p "${cache_dir}" ; \ - tar -C /var/cache -cf "${cache_dir}"/cache.tar apt ; \ - tar -C /var/lib -cf 
"${cache_dir}"/lib.tar apt ; \ + tar -C /var/cache -cJf "${cache_dir}"/cache.tar.xz apt ; \ + tar -C /var/lib -cJf "${cache_dir}"/lib.tar.xz apt ; \ file="${cache_dir}/apt.sh" ; \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ printf -- '%s\n' >> "${file}" \ 'cd "$(dirname "$0")" ;' \ - 'tar -C /var/cache -xpf cache.tar ;' \ - 'tar -C /var/lib -xpf lib.tar ;' + 'tar -C /var/cache -xpf cache.tar.xz ;' \ + 'tar -C /var/lib -xpf lib.tar.xz ;' FROM debian:${DEBIAN_VERSION} AS tubesync From a144a797fc39541ad568de1c2eb90c350777051a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 04:17:56 -0500 Subject: [PATCH 107/220] Fixes from testing --- Dockerfile | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 86d9df94..e03eabc1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,7 @@ ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" -ARG DEBIAN_PKGS="bin-utils ca-certificates curl file gcc g++ less locales make nginx-light pipenv python3-dev xz-utils" +ARG DEBIAN_PKGS="binutils ca-certificates curl file gcc g++ less locales make nginx-light pipenv python3-dev xz-utils" FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE @@ -224,14 +224,14 @@ ARG DEBIAN_PKGS RUN \ set -eu ; \ rm -f /etc/apt/apt.conf.d/docker-clean ; \ - DEBIAN_FRONTEND="noninteractive"; export DEBIAN_FRONTEND ; \ - apt-get --quiet update ; \ - apt-get --quiet --assume-yes install --download-only --no-install-recommends \ + DEBIAN_FRONTEND='noninteractive'; export DEBIAN_FRONTEND ; \ + apt-get update && \ + apt-get --assume-yes install --download-only --no-install-recommends \ ${DEBIAN_PKGS} ; \ cache_dir='/cache/apt' ; \ mkdir -v -p "${cache_dir}" ; \ - tar -C /var/cache -cJf "${cache_dir}"/cache.tar.xz apt ; \ - tar -C /var/lib -cJf "${cache_dir}"/lib.tar.xz apt ; \ + tar -C /var/cache -cJf "${cache_dir}/cache.tar.xz" apt ; \ + tar -C /var/lib -cJf 
"${cache_dir}/lib.tar.xz" apt ; \ file="${cache_dir}/apt.sh" ; \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ From 4251bf527417dcb0fdd58225d9d4438e0038cd9b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 04:22:07 -0500 Subject: [PATCH 108/220] xz is not in the image --- Dockerfile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index e03eabc1..2ad67828 100644 --- a/Dockerfile +++ b/Dockerfile @@ -230,15 +230,15 @@ RUN \ ${DEBIAN_PKGS} ; \ cache_dir='/cache/apt' ; \ mkdir -v -p "${cache_dir}" ; \ - tar -C /var/cache -cJf "${cache_dir}/cache.tar.xz" apt ; \ - tar -C /var/lib -cJf "${cache_dir}/lib.tar.xz" apt ; \ + tar -C /var/cache -cf "${cache_dir}/cache.tar" apt ; \ + tar -C /var/lib -cf "${cache_dir}/lib.tar" apt ; \ file="${cache_dir}/apt.sh" ; \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ printf -- '%s\n' >> "${file}" \ 'cd "$(dirname "$0")" ;' \ - 'tar -C /var/cache -xpf cache.tar.xz ;' \ - 'tar -C /var/lib -xpf lib.tar.xz ;' + 'tar -C /var/cache -xpf cache.tar ;' \ + 'tar -C /var/lib -xpf lib.tar ;' FROM debian:${DEBIAN_VERSION} AS tubesync From d3f2d7cb14e541bdefe60bcc37959bc8c3c8baca Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 04:30:30 -0500 Subject: [PATCH 109/220] Don't stop if /app/Pipfile cannot be removed --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 2ad67828..d90d07f3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -346,7 +346,7 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ cp -at /tmp/ "${HOME}" && \ PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ # Clean up - rm /app/Pipfile && \ + rm -f /app/Pipfile && \ pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ From 959d7e80b8eb99c2fb3bef79665604c8ef4ae2ac Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 
04:49:55 -0500 Subject: [PATCH 110/220] Don't try to remove/app/Pipfile at all --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index d90d07f3..a633734f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -346,7 +346,6 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ cp -at /tmp/ "${HOME}" && \ PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ # Clean up - rm -f /app/Pipfile && \ pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ From 6174d4660aa3a4b880e3841e8405d371276ed432 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 04:57:27 -0500 Subject: [PATCH 111/220] Cache more Debian packages --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index a633734f..ae16d3b1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,7 @@ ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" -ARG DEBIAN_PKGS="binutils ca-certificates curl file gcc g++ less locales make nginx-light pipenv python3-dev xz-utils" +ARG DEBIAN_PKGS="binutils ca-certificates curl file gcc g++ less libjpeg-dev libwebp-dev locales make nginx-light pipenv python3-dev xz-utils" FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE From abd82987898b72d2f68152b63f446cd39979504a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 05:12:43 -0500 Subject: [PATCH 112/220] Cache for pipenv & pip --- Dockerfile | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index ae16d3b1..09e89523 100644 --- a/Dockerfile +++ b/Dockerfile @@ -321,8 +321,11 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ + 
--mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ + --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ + unset -v PIP_NO_CACHE_DIR ; \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ # Install required build packages @@ -344,9 +347,12 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ useradd -M -d /app -s /bin/false -g app app && \ # Install non-distro packages cp -at /tmp/ "${HOME}" && \ - PIPENV_VERBOSITY=64 HOME="/tmp/${HOME#/}" pipenv install --system --skip-lock && \ + HOME="/tmp/${HOME#/}" \ + PIPENV_VERBOSITY=64 \ + PIP_CACHE_DIR='/cache/pip' \ + PIPENV_CACHE_DIR='/cache/pipenv' \ + pipenv install --system --skip-lock && \ # Clean up - pipenv --clear && \ apt-get -y autoremove --purge \ default-libmysqlclient-dev \ g++ \ From 0446bc5213d06c52775e1b4743c6463469a9c5a9 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 05:16:05 -0500 Subject: [PATCH 113/220] Adjust for the new mount point --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 09e89523..0fdc9a71 100644 --- a/Dockerfile +++ b/Dockerfile @@ -327,7 +327,7 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ + { test -x /cache/apt/apt.sh && /cache/apt/apt.sh || apt-get update ; } && \ # Install required build packages apt-get -y --no-install-recommends install \ default-libmysqlclient-dev \ From 7ffc2d8f7d91b18b4255763f05536b19987d9392 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 05:47:28 -0500 Subject: [PATCH 114/220] Keep update to grab the latest Debian packages The cache layer in docker could have really out-of-date versions, we want to use the 
latest available. When the cache stage is up-to-date, we save time and bandwidth. --- Dockerfile | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0fdc9a71..6cf259ac 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,7 +19,11 @@ ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" -ARG DEBIAN_PKGS="binutils ca-certificates curl file gcc g++ less libjpeg-dev libwebp-dev locales make nginx-light pipenv python3-dev xz-utils" +ARG DEBIAN_PKGS="\ + binutils build-essential ca-certificates curl file gcc g++ less \ + libjpeg-dev libjson-perl libssl-dev libwebp-dev locales make \ + nginx-light pipenv python3-dev python3-pip python3-setuptools xz-utils \ + " FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE @@ -269,7 +273,8 @@ COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ + { test -x /cache/apt.sh && /cache/apt.sh ; } && \ + apt-get update && \ apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ locale-gen en_US.UTF-8 && \ @@ -289,7 +294,8 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ # Install dependencies we keep RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh || apt-get update ; } && \ + { test -x /cache/apt.sh && /cache/apt.sh ; } && \ + apt-get update && \ # Install required distro packages apt-get -y --no-install-recommends install \ libjpeg62-turbo \ @@ -327,7 +333,8 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ - { test -x 
/cache/apt/apt.sh && /cache/apt/apt.sh || apt-get update ; } && \ + { test -x /cache/apt/apt.sh && /cache/apt/apt.sh ; } && \ + apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ default-libmysqlclient-dev \ From 4c16308a85cd0ebe3355a72c2b4e754825d5537a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 05:52:51 -0500 Subject: [PATCH 115/220] Revert Show resulting files --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 6cf259ac..3af4674a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -107,7 +107,7 @@ RUN set -eu ; \ \ mkdir -v -p "/verified/${TARGETARCH}" ; \ ln -v "${FFMPEG_PREFIX_FILE}"*-"${FFMPEG_ARCH}"-*"${FFMPEG_SUFFIX_FILE}" "/verified/${TARGETARCH}/" ; \ - rm -rf "${DESTDIR}" ; ls -alR /*ed ; + rm -rf "${DESTDIR}" ; FROM alpine:${ALPINE_VERSION} AS ffmpeg-extracted COPY --from=ffmpeg-download /verified /verified From 5443432dce0c80f7d0335d79133c8f77ae79d163 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 06:12:24 -0500 Subject: [PATCH 116/220] We don't want update to remove our cached packages --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index 3af4674a..89331d0a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -240,6 +240,8 @@ RUN \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ printf -- '%s\n' >> "${file}" \ + 'rm -f /etc/apt/apt.conf.d/docker-clean ;' \ + 'set -e ;' \ 'cd "$(dirname "$0")" ;' \ 'tar -C /var/cache -xpf cache.tar ;' \ 'tar -C /var/lib -xpf lib.tar ;' From 3c827c5d94a81a297026b8929b86f4e690d9247c Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 06:36:10 -0500 Subject: [PATCH 117/220] Only cache one copy of the packages --- Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 89331d0a..59bb8b6a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -225,7 +225,7 @@ COPY --from=s6-overlay-extracted 
/s6-overlay-rootfs / FROM debian:${DEBIAN_VERSION} AS cache-apt ARG DEBIAN_PKGS -RUN \ +RUN --mount=type=cache,id=apt-cache,sharing=locked,target=/cache/apt \ set -eu ; \ rm -f /etc/apt/apt.conf.d/docker-clean ; \ DEBIAN_FRONTEND='noninteractive'; export DEBIAN_FRONTEND ; \ @@ -236,6 +236,7 @@ RUN \ mkdir -v -p "${cache_dir}" ; \ tar -C /var/cache -cf "${cache_dir}/cache.tar" apt ; \ tar -C /var/lib -cf "${cache_dir}/lib.tar" apt ; \ + apt-get --assume-yes clean ; \ file="${cache_dir}/apt.sh" ; \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ From 32ea28f7b20536f4deb897509abe7adc78cc5458 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 06:42:38 -0500 Subject: [PATCH 118/220] Update apt-cache readers --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 59bb8b6a..92bd0129 100644 --- a/Dockerfile +++ b/Dockerfile @@ -274,7 +274,7 @@ COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh ; } && \ apt-get update && \ @@ -295,7 +295,7 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ rm -rf /tmp/* # Install dependencies we keep -RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh ; } && \ apt-get update && \ @@ -332,7 +332,7 @@ WORKDIR /app # Set up the app RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ - --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ + 
--mount=type=cache,id=apt-cache,readonly,target=/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ From 3909e922c0a47ec52d59166fa3afcd46a27aab26 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 06:58:37 -0500 Subject: [PATCH 119/220] The cache may not exist --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 92bd0129..a76f9e92 100644 --- a/Dockerfile +++ b/Dockerfile @@ -276,7 +276,7 @@ COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh ; } && \ + { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ apt-get update && \ apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ @@ -297,7 +297,7 @@ RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ # Install dependencies we keep RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh ; } && \ + { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ apt-get update && \ # Install required distro packages apt-get -y --no-install-recommends install \ @@ -336,7 +336,7 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ - { test -x /cache/apt/apt.sh && /cache/apt/apt.sh ; } && \ + { test -x /cache/apt/apt.sh && /cache/apt/apt.sh || : ; } && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ From f152cf99d24d90ce4a4cb0d299a58a89831aa950 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 07:07:09 -0500 Subject: [PATCH 120/220] Use a private cache to ensure it is built --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff 
--git a/Dockerfile b/Dockerfile index a76f9e92..fa1a9bea 100644 --- a/Dockerfile +++ b/Dockerfile @@ -225,7 +225,7 @@ COPY --from=s6-overlay-extracted /s6-overlay-rootfs / FROM debian:${DEBIAN_VERSION} AS cache-apt ARG DEBIAN_PKGS -RUN --mount=type=cache,id=apt-cache,sharing=locked,target=/cache/apt \ +RUN --mount=type=cache,id=apt-cache,sharing=private,target=/cache/apt \ set -eu ; \ rm -f /etc/apt/apt.conf.d/docker-clean ; \ DEBIAN_FRONTEND='noninteractive'; export DEBIAN_FRONTEND ; \ From f0822f1555c9740509edddcdfcd3ea9b844330d7 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 07:21:42 -0500 Subject: [PATCH 121/220] Use from for readers --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index fa1a9bea..55cbd9ab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -274,7 +274,7 @@ COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ +RUN --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ apt-get update && \ @@ -295,7 +295,7 @@ RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ rm -rf /tmp/* # Install dependencies we keep -RUN --mount=type=cache,id=apt-cache,readonly,target=/cache \ +RUN --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ apt-get update && \ @@ -332,7 +332,7 @@ WORKDIR /app # Set up the app RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ - --mount=type=cache,id=apt-cache,readonly,target=/cache/apt \ + --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ 
unset -v PIP_NO_CACHE_DIR ; \ set -x && \ From c409ac51e747e45954c98c4bebaa225d333356a7 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 07:28:55 -0500 Subject: [PATCH 122/220] Store the tar archives in the layer --- Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 55cbd9ab..4de2d3e0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -225,7 +225,8 @@ COPY --from=s6-overlay-extracted /s6-overlay-rootfs / FROM debian:${DEBIAN_VERSION} AS cache-apt ARG DEBIAN_PKGS -RUN --mount=type=cache,id=apt-cache,sharing=private,target=/cache/apt \ +RUN --mount=type=cache,id=apt-lib,sharing=locked,target=/var/lib/apt \ + --mount=type=cache,id=apt-cache,sharing=locked,target=/var/cache/apt \ set -eu ; \ rm -f /etc/apt/apt.conf.d/docker-clean ; \ DEBIAN_FRONTEND='noninteractive'; export DEBIAN_FRONTEND ; \ @@ -236,7 +237,7 @@ RUN --mount=type=cache,id=apt-cache,sharing=private,target=/cache/apt \ mkdir -v -p "${cache_dir}" ; \ tar -C /var/cache -cf "${cache_dir}/cache.tar" apt ; \ tar -C /var/lib -cf "${cache_dir}/lib.tar" apt ; \ - apt-get --assume-yes clean ; \ + apt-get --assume-yes autoclean ; \ file="${cache_dir}/apt.sh" ; \ printf -- '#!/bin/sh\n\n' >| "${file}" ; \ chmod -v a+rx "${file}" ; \ From 69062e0051bf9d8c1aacf75396de172ce01f3266 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 07:34:58 -0500 Subject: [PATCH 123/220] Remove I'd from readers --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4de2d3e0..9c4d1aab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -275,7 +275,7 @@ COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh || : ; } && 
\ apt-get update && \ @@ -296,7 +296,7 @@ RUN --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,ta rm -rf /tmp/* # Install dependencies we keep -RUN --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ apt-get update && \ @@ -333,7 +333,7 @@ WORKDIR /app # Set up the app RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ - --mount=type=cache,id=apt-cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ + --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ From 937cb77a4c40babfc06964aac4f03841ee945730 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 08:34:59 -0500 Subject: [PATCH 124/220] Clean up tubesync stage --- Dockerfile | 54 ++++++++++++++++++++---------------------------------- 1 file changed, 20 insertions(+), 34 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9c4d1aab..551fba38 100644 --- a/Dockerfile +++ b/Dockerfile @@ -250,9 +250,6 @@ RUN --mount=type=cache,id=apt-lib,sharing=locked,target=/var/lib/apt \ FROM debian:${DEBIAN_VERSION} AS tubesync -ARG TARGETARCH -ARG TARGETPLATFORM - ARG S6_VERSION ARG FFMPEG_DATE @@ -277,29 +274,26 @@ COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ + # Create a 'app' user which the application will run as + groupadd app && \ + useradd -M -d /app -s /bin/false -g app app && \ + # Update from the cache and/or the network + { test '!' 
-x /cache/apt.sh || /cache/apt.sh ; } && \ apt-get update && \ + # Install locales apt-get -y --no-install-recommends install locales && \ printf -- "en_US.UTF-8 UTF-8\n" > /etc/locale.gen && \ locale-gen en_US.UTF-8 && \ - # Install required distro packages - apt-get -y --no-install-recommends install curl ca-certificates file binutils xz-utils && \ + # Install file + apt-get -y --no-install-recommends install file && \ # Installed s6 (using COPY earlier) file -L /command/s6-overlay-suexec && \ # Installed ffmpeg (using COPY earlier) /usr/local/bin/ffmpeg -version && \ file /usr/local/bin/ff* && \ - # Clean up - apt-get -y autoremove --purge file binutils xz-utils && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf /var/cache/apt/* && \ - rm -rf /tmp/* - -# Install dependencies we keep -RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ - set -x && \ - { test -x /cache/apt.sh && /cache/apt.sh || : ; } && \ - apt-get update && \ + # Clean up file + apt-get -y autoremove --purge file && \ + # Install dependencies we keep # Install required distro packages apt-get -y --no-install-recommends install \ libjpeg62-turbo \ @@ -314,9 +308,10 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ redis-server \ curl \ less \ - && apt-get -y autoclean && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf /var/cache/apt/* && \ + && \ + # Clean up + apt-get -y autopurge && \ + apt-get -y clean && \ rm -rf /tmp/* # Copy over pip.conf to use piwheels @@ -337,7 +332,7 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ - { test -x /cache/apt/apt.sh && /cache/apt/apt.sh || : ; } && \ + { test '!' 
-x /cache/apt/apt.sh || /cache/apt/apt.sh ; } && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ @@ -353,9 +348,6 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ python3-pip \ zlib1g-dev \ && \ - # Create a 'app' user which the application will run as - groupadd app && \ - useradd -M -d /app -s /bin/false -g app app && \ # Install non-distro packages cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ @@ -377,13 +369,10 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ python3-pip \ zlib1g-dev \ && \ - apt-get -y autoremove && \ - apt-get -y autoclean && \ - rm -rf /var/lib/apt/lists/* && \ - rm -rf /var/cache/apt/* && \ + apt-get -y autopurge && \ + apt-get -y clean && \ rm -rf /tmp/* - # Copy app COPY tubesync /app COPY tubesync/tubesync/local_settings.py.container /app/tubesync/local_settings.py @@ -400,11 +389,8 @@ RUN set -x && \ mkdir -v -p /config/media && \ mkdir -v -p /config/cache/pycache && \ mkdir -v -p /downloads/audio && \ - mkdir -v -p /downloads/video - - -# Append software versions -RUN set -x && \ + mkdir -v -p /downloads/video \ + # Append software versions ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ test -n "${ffmpeg_version}" && \ printf -- "ffmpeg_version = '%s'\n" "${ffmpeg_version}" >> /app/common/third_party_versions.py From 5d944542d71ff505e5bfc5c3df48b947b4d6e136 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 08:41:16 -0500 Subject: [PATCH 125/220] fixup: add missing && --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 551fba38..59f1818a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -389,7 +389,7 @@ RUN set -x && \ mkdir -v -p /config/media && \ mkdir -v -p /config/cache/pycache && \ mkdir -v -p /downloads/audio && \ - mkdir -v -p /downloads/video \ + mkdir -v -p 
/downloads/video && \ # Append software versions ffmpeg_version=$(/usr/local/bin/ffmpeg -version | awk -v 'ev=31' '1 == NR && "ffmpeg" == $1 { print $3; ev=0; } END { exit ev; }') && \ test -n "${ffmpeg_version}" && \ From 785c7b6a7b904ee28a70a9b115d7b8b1c71a8049 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 09:06:32 -0500 Subject: [PATCH 126/220] Move apt cache out of the stage --- Dockerfile | 46 +++++++++------------------------------------- 1 file changed, 9 insertions(+), 37 deletions(-) diff --git a/Dockerfile b/Dockerfile index 59f1818a..68d25d3b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -19,11 +19,6 @@ ARG FFMPEG_SUFFIX_FILE=".tar.xz" ARG FFMPEG_CHECKSUM_ALGORITHM="sha256" ARG S6_CHECKSUM_ALGORITHM="sha256" -ARG DEBIAN_PKGS="\ - binutils build-essential ca-certificates curl file gcc g++ less \ - libjpeg-dev libjson-perl libssl-dev libwebp-dev locales make \ - nginx-light pipenv python3-dev python3-pip python3-setuptools xz-utils \ - " FROM alpine:${ALPINE_VERSION} AS ffmpeg-download ARG FFMPEG_DATE @@ -223,31 +218,6 @@ RUN set -eu ; \ FROM scratch AS s6-overlay COPY --from=s6-overlay-extracted /s6-overlay-rootfs / -FROM debian:${DEBIAN_VERSION} AS cache-apt -ARG DEBIAN_PKGS -RUN --mount=type=cache,id=apt-lib,sharing=locked,target=/var/lib/apt \ - --mount=type=cache,id=apt-cache,sharing=locked,target=/var/cache/apt \ - set -eu ; \ - rm -f /etc/apt/apt.conf.d/docker-clean ; \ - DEBIAN_FRONTEND='noninteractive'; export DEBIAN_FRONTEND ; \ - apt-get update && \ - apt-get --assume-yes install --download-only --no-install-recommends \ - ${DEBIAN_PKGS} ; \ - cache_dir='/cache/apt' ; \ - mkdir -v -p "${cache_dir}" ; \ - tar -C /var/cache -cf "${cache_dir}/cache.tar" apt ; \ - tar -C /var/lib -cf "${cache_dir}/lib.tar" apt ; \ - apt-get --assume-yes autoclean ; \ - file="${cache_dir}/apt.sh" ; \ - printf -- '#!/bin/sh\n\n' >| "${file}" ; \ - chmod -v a+rx "${file}" ; \ - printf -- '%s\n' >> "${file}" \ - 'rm -f /etc/apt/apt.conf.d/docker-clean ;' \ - 
'set -e ;' \ - 'cd "$(dirname "$0")" ;' \ - 'tar -C /var/cache -xpf cache.tar ;' \ - 'tar -C /var/lib -xpf lib.tar ;' - FROM debian:${DEBIAN_VERSION} AS tubesync ARG S6_VERSION @@ -272,13 +242,14 @@ COPY --from=s6-overlay / / COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ # Reminder: the SHELL handles all variables -RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ +RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ + --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ set -x && \ # Create a 'app' user which the application will run as groupadd app && \ useradd -M -d /app -s /bin/false -g app app && \ - # Update from the cache and/or the network - { test '!' -x /cache/apt.sh || /cache/apt.sh ; } && \ + # Update from the network and keep cache + rm -f /etc/apt/apt.conf.d/docker-clean ; \ apt-get update && \ # Install locales apt-get -y --no-install-recommends install locales && \ @@ -311,7 +282,7 @@ RUN --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache \ && \ # Clean up apt-get -y autopurge && \ - apt-get -y clean && \ + apt-get -y autoclean && \ rm -rf /tmp/* # Copy over pip.conf to use piwheels @@ -328,11 +299,12 @@ WORKDIR /app # Set up the app RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ - --mount=type=cache,readonly,from=cache-apt,source=/cache/apt,target=/cache/apt \ + --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ + --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ - { test '!' 
-x /cache/apt/apt.sh || /cache/apt/apt.sh ; } && \ + rm -f /etc/apt/apt.conf.d/docker-clean ; \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ @@ -370,7 +342,7 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ zlib1g-dev \ && \ apt-get -y autopurge && \ - apt-get -y clean && \ + apt-get -y autoclean && \ rm -rf /tmp/* # Copy app From 44eca46e429da30392110021d9513352fa0b5d55 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 09:21:27 -0500 Subject: [PATCH 127/220] Move user creation --- Dockerfile | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 68d25d3b..38dfe8e5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -245,9 +245,6 @@ COPY --from=ffmpeg /usr/local/bin/ /usr/local/bin/ RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ set -x && \ - # Create a 'app' user which the application will run as - groupadd app && \ - useradd -M -d /app -s /bin/false -g app app && \ # Update from the network and keep cache rm -f /etc/apt/apt.conf.d/docker-clean ; \ apt-get update && \ @@ -304,6 +301,10 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ unset -v PIP_NO_CACHE_DIR ; \ set -x && \ + # Create a 'app' user which the application will run as + groupadd app && \ + useradd -M -d /app -s /bin/false -g app app && \ + # Update from the network and keep cache rm -f /etc/apt/apt.conf.d/docker-clean ; \ apt-get update && \ # Install required build packages From 35aab6fbb1163ce10c267ca33395455ad595de98 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 09:28:52 -0500 Subject: [PATCH 128/220] fixup: rm lines using && --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 38dfe8e5..82282c58 100644 --- 
a/Dockerfile +++ b/Dockerfile @@ -246,7 +246,7 @@ RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ set -x && \ # Update from the network and keep cache - rm -f /etc/apt/apt.conf.d/docker-clean ; \ + rm -f /etc/apt/apt.conf.d/docker-clean && \ apt-get update && \ # Install locales apt-get -y --no-install-recommends install locales && \ @@ -305,7 +305,7 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ groupadd app && \ useradd -M -d /app -s /bin/false -g app app && \ # Update from the network and keep cache - rm -f /etc/apt/apt.conf.d/docker-clean ; \ + rm -f /etc/apt/apt.conf.d/docker-clean && \ apt-get update && \ # Install required build packages apt-get -y --no-install-recommends install \ From 698e19b46e0af230c603fe696772d8643d538154 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 09:37:06 -0500 Subject: [PATCH 129/220] Do not cache pip --- Dockerfile | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/Dockerfile b/Dockerfile index 82282c58..315b1d7c 100644 --- a/Dockerfile +++ b/Dockerfile @@ -294,12 +294,10 @@ ENV PIP_NO_COMPILE=1 \ WORKDIR /app # Set up the app -RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ - --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ +RUN --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ - unset -v PIP_NO_CACHE_DIR ; \ set -x && \ # Create a 'app' user which the application will run as groupadd app && \ @@ -325,7 +323,6 @@ RUN --mount=type=cache,id=pip-cache,sharing=locked,target=/cache/pip \ cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ PIPENV_VERBOSITY=64 \ - PIP_CACHE_DIR='/cache/pip' \ 
PIPENV_CACHE_DIR='/cache/pipenv' \ pipenv install --system --skip-lock && \ # Clean up From 1a5ddcb75315e5e191529216845c403586713b14 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 09:51:53 -0500 Subject: [PATCH 130/220] Use tmpfs for /cache --- Dockerfile | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 315b1d7c..4f6d76be 100644 --- a/Dockerfile +++ b/Dockerfile @@ -287,14 +287,14 @@ COPY pip.conf /etc/pip.conf # Do not include compiled byte-code ENV PIP_NO_COMPILE=1 \ - PIP_NO_CACHE_DIR=1 \ PIP_ROOT_USER_ACTION='ignore' # Switch workdir to the the app WORKDIR /app # Set up the app -RUN --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ +RUN --mount=type=tmpfs,target=/cache \ + --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ @@ -322,6 +322,7 @@ RUN --mount=type=cache,id=pipenv-cache,sharing=locked,target=/cache/pipenv \ # Install non-distro packages cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ + XDG_CACHE_HOME='/cache' \ PIPENV_VERBOSITY=64 \ PIPENV_CACHE_DIR='/cache/pipenv' \ pipenv install --system --skip-lock && \ From fb6adc5b58966e0885f9d1164af8a88da3c6d84a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 10:52:24 -0500 Subject: [PATCH 131/220] Consolidate ENV layers --- Dockerfile | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4f6d76be..8f7ce862 100644 --- a/Dockerfile +++ b/Dockerfile @@ -225,17 +225,20 @@ ARG S6_VERSION ARG FFMPEG_DATE ARG FFMPEG_VERSION -ENV S6_VERSION="${S6_VERSION}" \ - FFMPEG_DATE="${FFMPEG_DATE}" \ - FFMPEG_VERSION="${FFMPEG_VERSION}" - ENV DEBIAN_FRONTEND="noninteractive" \ - HOME="/root" \ - LANGUAGE="en_US.UTF-8" \ - 
LANG="en_US.UTF-8" \ - LC_ALL="en_US.UTF-8" \ - TERM="xterm" \ - S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" + HOME="/root" \ + LANGUAGE="en_US.UTF-8" \ + LANG="en_US.UTF-8" \ + LC_ALL="en_US.UTF-8" \ + TERM="xterm" \ + # Do not include compiled byte-code + PIP_NO_COMPILE=1 \ + PIP_ROOT_USER_ACTION='ignore' \ + S6_CMD_WAIT_FOR_SERVICES_MAXTIME="0" + +ENV S6_VERSION="${S6_VERSION}" \ + FFMPEG_DATE="${FFMPEG_DATE}" \ + FFMPEG_VERSION="${FFMPEG_VERSION}" # Install third party software COPY --from=s6-overlay / / @@ -285,10 +288,6 @@ RUN --mount=type=cache,id=apt-lib-cache,sharing=locked,target=/var/lib/apt \ # Copy over pip.conf to use piwheels COPY pip.conf /etc/pip.conf -# Do not include compiled byte-code -ENV PIP_NO_COMPILE=1 \ - PIP_ROOT_USER_ACTION='ignore' - # Switch workdir to the the app WORKDIR /app @@ -323,8 +322,8 @@ RUN --mount=type=tmpfs,target=/cache \ cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ XDG_CACHE_HOME='/cache' \ + # PIPENV_CACHE_DIR='/cache/pipenv' PIPENV_VERBOSITY=64 \ - PIPENV_CACHE_DIR='/cache/pipenv' \ pipenv install --system --skip-lock && \ # Clean up apt-get -y autoremove --purge \ @@ -342,7 +341,7 @@ RUN --mount=type=tmpfs,target=/cache \ && \ apt-get -y autopurge && \ apt-get -y autoclean && \ - rm -rf /tmp/* + rm -v -rf /tmp/* # Copy app COPY tubesync /app @@ -373,7 +372,8 @@ COPY config/root / HEALTHCHECK --interval=1m --timeout=10s --start-period=3m CMD ["/app/healthcheck.py", "http://127.0.0.1:8080/healthcheck"] # ENVS and ports -ENV PYTHONPATH="/app" PYTHONPYCACHEPREFIX="/config/cache/pycache" +ENV PYTHONPATH="/app" \ + PYTHONPYCACHEPREFIX="/config/cache/pycache" EXPOSE 4848 # Volumes From 99c4f7ba6264dd7e2b3258e24288981dec60edd7 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 11:06:37 -0500 Subject: [PATCH 132/220] Undo move of user creation --- Dockerfile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8f7ce862..342a731b 100644 --- a/Dockerfile +++ b/Dockerfile @@ 
-298,9 +298,6 @@ RUN --mount=type=tmpfs,target=/cache \ --mount=type=cache,id=apt-cache-cache,sharing=locked,target=/var/cache/apt \ --mount=type=bind,source=Pipfile,target=/app/Pipfile \ set -x && \ - # Create a 'app' user which the application will run as - groupadd app && \ - useradd -M -d /app -s /bin/false -g app app && \ # Update from the network and keep cache rm -f /etc/apt/apt.conf.d/docker-clean && \ apt-get update && \ @@ -318,6 +315,9 @@ RUN --mount=type=tmpfs,target=/cache \ python3-pip \ zlib1g-dev \ && \ + # Create a 'app' user which the application will run as + groupadd app && \ + useradd -M -d /app -s /bin/false -g app app && \ # Install non-distro packages cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ From df2f7c814a30e834f0a65fcf31c7f3f175ca1d9b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 17 Jan 2025 11:11:19 -0500 Subject: [PATCH 133/220] Remove PIPENV_CACHE_DIR --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 342a731b..6df1bc71 100644 --- a/Dockerfile +++ b/Dockerfile @@ -322,7 +322,6 @@ RUN --mount=type=tmpfs,target=/cache \ cp -at /tmp/ "${HOME}" && \ HOME="/tmp/${HOME#/}" \ XDG_CACHE_HOME='/cache' \ - # PIPENV_CACHE_DIR='/cache/pipenv' PIPENV_VERBOSITY=64 \ pipenv install --system --skip-lock && \ # Clean up From 18e8888a8aedfab440e8ea5920d8cdc26f91642a Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 04:46:24 -0500 Subject: [PATCH 134/220] Update ci.yaml --- .github/workflows/ci.yaml | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8e875e27..e2b501e2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -47,23 +47,22 @@ jobs: gh api repos/:owner/:repo --jq .parent.full_name ; printf -- '%s\n' "${delim}" ; } >> "$GITHUB_ENV" + gh api repos/:owner/:repo --jq .parent || : - name: Upstream registry ref id: upstream run: | user="$(printf -- '%s\n' 
"${GH_UPSTREAM_SLUG}" | cut -d '/' -f 1)" user_lowercase="$(printf -- '%s\n' "${user}" | awk '{print tolower($0);}')" ; - printf -- 'ref=ghcr.io/%s/%s:latest\n' \ - "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; - printf -- 'tag=ghcr.io/%s/%s:latest\n' \ - "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + printf >> "$GITHUB_OUTPUT" -- '%s=ghcr.io/%s/%s:latest\n' \ + ref "${user_lowercase}" "${IMAGE_NAME}" \ + tag "${user_lowercase}" "${IMAGE_NAME}" ; - name: Registry ref id: origin run: | user_lowercase="$(printf -- '%s\n' "${GITHUB_ACTOR}" | awk '{print tolower($0);}')" ; - printf -- 'ref=ghcr.io/%s/%s:latest\n' \ - "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; - printf -- 'tag=ghcr.io/%s/%s:latest\n' \ - "${user_lowercase}" "${IMAGE_NAME}" >> "$GITHUB_OUTPUT" ; + printf >> "$GITHUB_OUTPUT" -- '%s=ghcr.io/%s/%s:latest\n' \ + ref "${user_lowercase}" "${IMAGE_NAME}" \ + tag "${user_lowercase}" "${IMAGE_NAME}" ; - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -79,6 +78,6 @@ jobs: cache-from: | type=registry,ref=${{ steps.upstream.outputs.ref }} type=registry,ref=${{ steps.origin.outputs.ref }} - cache-to: type=inline + cache-to: type=registry,ref=${{ steps.origin.outputs.ref }},mode=max build-args: | IMAGE_NAME=${{ env.IMAGE_NAME }} From 917b27aeb1887c18b5a12243aeb460286abedda2 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 06:00:52 -0500 Subject: [PATCH 135/220] Update ci.yaml --- .github/workflows/ci.yaml | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e2b501e2..ea417e22 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -36,23 +36,27 @@ jobs: steps: - name: Checkout uses: actions/checkout@v4 - - name: Find upstream GitHub slug + - name: Find upstream GitHub owner env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | + GH_UPSTREAM_OWNER='.parent.owner.login' ; + 
GH_UPSTREAM_REPO='.parent.name' ; + GH_UPSTREAM_SLUG='.parent.full_name' ; { - var='GH_UPSTREAM_SLUG' ; - delim='"'"${var}"'_EOF"' ; - printf -- '%s<<%s\n' "${var}" "${delim}" ; - gh api repos/:owner/:repo --jq .parent.full_name ; - printf -- '%s\n' "${delim}" ; + for var in GH_UPSTREAM_OWNER GH_UPSTREAM_REPO GH_UPSTREAM_SLUG + do + jq_arg="$( eval printf -- "'%s\n'" '"${'"${var}"'}"' )" + delim='"'"${var}"'_EOF"' ; + printf -- '%s<<%s\n' "${var}" "${delim}" ; + gh api repos/:owner/:repo --cache 5m --jq "${jq_arg}" ; + printf -- '%s\n' "${delim}" ; + done } >> "$GITHUB_ENV" - gh api repos/:owner/:repo --jq .parent || : - name: Upstream registry ref id: upstream run: | - user="$(printf -- '%s\n' "${GH_UPSTREAM_SLUG}" | cut -d '/' -f 1)" - user_lowercase="$(printf -- '%s\n' "${user}" | awk '{print tolower($0);}')" ; + user_lowercase="$(printf -- '%s\n' "${GH_UPSTREAM_OWNER}" | awk '{print tolower($0);}')" ; printf >> "$GITHUB_OUTPUT" -- '%s=ghcr.io/%s/%s:latest\n' \ ref "${user_lowercase}" "${IMAGE_NAME}" \ tag "${user_lowercase}" "${IMAGE_NAME}" ; From 97193b6866ead14dcef6e9363748c7f482afff71 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 06:33:42 -0500 Subject: [PATCH 136/220] Update ci.yaml --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ea417e22..e4d53b78 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -46,6 +46,7 @@ jobs: { for var in GH_UPSTREAM_OWNER GH_UPSTREAM_REPO GH_UPSTREAM_SLUG do + # jq_arg="$( eval printf -- "'%s\n'" "$(printf '"${%s}"' "${var}")" )" jq_arg="$( eval printf -- "'%s\n'" '"${'"${var}"'}"' )" delim='"'"${var}"'_EOF"' ; printf -- '%s<<%s\n' "${var}" "${delim}" ; From 3fe3189f7ba40d99c7a243a0da1b3c37303e0850 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 06:37:47 -0500 Subject: [PATCH 137/220] Update ci.yaml --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e4d53b78..3150acab 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -46,7 +46,7 @@ jobs: { for var in GH_UPSTREAM_OWNER GH_UPSTREAM_REPO GH_UPSTREAM_SLUG do - # jq_arg="$( eval printf -- "'%s\n'" "$(printf '"${%s}"' "${var}")" )" + # jq_arg="$( eval printf -- "'%s\n'" "$(printf -- '"${%s}"' "${var}")" )" jq_arg="$( eval printf -- "'%s\n'" '"${'"${var}"'}"' )" delim='"'"${var}"'_EOF"' ; printf -- '%s<<%s\n' "${var}" "${delim}" ; From 31e2ed56034149e3b59be3c10945ea780bc4ba91 Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 09:45:17 -0500 Subject: [PATCH 138/220] Update ci.yaml --- .github/workflows/ci.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3150acab..74b06297 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -44,7 +44,7 @@ jobs: GH_UPSTREAM_REPO='.parent.name' ; GH_UPSTREAM_SLUG='.parent.full_name' ; { - for var in GH_UPSTREAM_OWNER GH_UPSTREAM_REPO GH_UPSTREAM_SLUG + for var in GH_UPSTREAM_OWNER # GH_UPSTREAM_REPO GH_UPSTREAM_SLUG do # jq_arg="$( eval printf -- "'%s\n'" "$(printf -- '"${%s}"' "${var}")" )" jq_arg="$( eval printf -- "'%s\n'" '"${'"${var}"'}"' )" @@ -53,6 +53,7 @@ jobs: gh api repos/:owner/:repo --cache 5m --jq "${jq_arg}" ; printf -- '%s\n' "${delim}" ; done + unset -v delim jq_arg var } >> "$GITHUB_ENV" - name: Upstream registry ref id: upstream From 9a1744b09814f0b05b17cce00c85916737edbbba Mon Sep 17 00:00:00 2001 From: tcely Date: Mon, 27 Jan 2025 14:03:42 -0500 Subject: [PATCH 139/220] Use cache for ref --- .github/workflows/ci.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 74b06297..4d6ed655 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -66,9 +66,9 @@ jobs: id: origin run: | user_lowercase="$(printf -- '%s\n' 
"${GITHUB_ACTOR}" | awk '{print tolower($0);}')" ; - printf >> "$GITHUB_OUTPUT" -- '%s=ghcr.io/%s/%s:latest\n' \ - ref "${user_lowercase}" "${IMAGE_NAME}" \ - tag "${user_lowercase}" "${IMAGE_NAME}" ; + printf >> "$GITHUB_OUTPUT" -- '%s=ghcr.io/%s/%s:%s\n' \ + 'ref' "${user_lowercase}" "${IMAGE_NAME}" 'cache' \ + 'tag' "${user_lowercase}" "${IMAGE_NAME}" 'latest' ; - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx From d27bf4bdde99c0f79a7fac5f373b6245a8d4812e Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 28 Jan 2025 15:31:48 -0500 Subject: [PATCH 140/220] Remove duplicated rename_files function This was duplicated by the conflict resolution in: 3cfc4bf9a58b3d33759ae67934dcea5eb3608a04 --- tubesync/sync/models.py | 75 ----------------------------------------- 1 file changed, 75 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 7112e3e9..f1079927 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -1643,81 +1643,6 @@ def rename_files(self): pass - def rename_files(self): - if self.downloaded and self.media_file: - old_video_path = Path(self.media_file.path) - new_video_path = Path(get_media_file_path(self, None)) - if old_video_path.exists() and not new_video_path.exists(): - old_video_path = old_video_path.resolve(strict=True) - - # move video to destination - mkdir_p(new_video_path.parent) - log.debug(f'{self!s}: {old_video_path!s} => {new_video_path!s}') - old_video_path.rename(new_video_path) - log.info(f'Renamed video file for: {self!s}') - - # collect the list of files to move - # this should not include the video we just moved - (old_prefix_path, old_stem) = directory_and_stem(old_video_path) - other_paths = list(old_prefix_path.glob(glob_quote(old_stem) + '*')) - log.info(f'Collected {len(other_paths)} other paths for: {self!s}') - - # adopt orphaned files, if possible - media_format = str(self.source.media_format) - top_dir_path = Path(self.source.directory_path) - if 
'{key}' in media_format: - fuzzy_paths = list(top_dir_path.rglob('*' + glob_quote(str(self.key)) + '*')) - log.info(f'Collected {len(fuzzy_paths)} fuzzy paths for: {self!s}') - - if new_video_path.exists(): - new_video_path = new_video_path.resolve(strict=True) - - # update the media_file in the db - self.media_file.name = str(new_video_path.relative_to(self.media_file.storage.location)) - self.save(update_fields={'media_file'}) - self.refresh_from_db(fields={'media_file'}) - log.info(f'Updated "media_file" in the database for: {self!s}') - - (new_prefix_path, new_stem) = directory_and_stem(new_video_path) - - # move and change names to match stem - for other_path in other_paths: - old_file_str = other_path.name - new_file_str = new_stem + old_file_str[len(old_stem):] - new_file_path = Path(new_prefix_path / new_file_str) - log.debug(f'Considering replace for: {self!s}\n\t{other_path!s}\n\t{new_file_path!s}') - # it should exist, but check anyway - if other_path.exists(): - log.debug(f'{self!s}: {other_path!s} => {new_file_path!s}') - other_path.replace(new_file_path) - - for fuzzy_path in fuzzy_paths: - (fuzzy_prefix_path, fuzzy_stem) = directory_and_stem(fuzzy_path) - old_file_str = fuzzy_path.name - new_file_str = new_stem + old_file_str[len(fuzzy_stem):] - new_file_path = Path(new_prefix_path / new_file_str) - log.debug(f'Considering rename for: {self!s}\n\t{fuzzy_path!s}\n\t{new_file_path!s}') - # it quite possibly was renamed already - if fuzzy_path.exists() and not new_file_path.exists(): - log.debug(f'{self!s}: {fuzzy_path!s} => {new_file_path!s}') - fuzzy_path.rename(new_file_path) - - # The thumbpath inside the .nfo file may have changed - if self.source.write_nfo and self.source.copy_thumbnails: - write_text_file(new_prefix_path / self.nfopath.name, self.nfoxml) - log.info(f'Wrote new ".nfo" file for: {self!s}') - - # try to remove empty dirs - parent_dir = old_video_path.parent - try: - while parent_dir.is_dir(): - parent_dir.rmdir() - 
log.info(f'Removed empty directory: {parent_dir!s}') - parent_dir = parent_dir.parent - except OSError as e: - pass - - class MediaServer(models.Model): ''' A remote media server, such as a Plex server. From 956b1daf012453701a9c183f82b1d6871bcd95a4 Mon Sep 17 00:00:00 2001 From: tcely Date: Tue, 28 Jan 2025 23:05:12 -0500 Subject: [PATCH 141/220] Update ci.yaml --- .github/workflows/ci.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 4d6ed655..ab855ece 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -80,6 +80,7 @@ jobs: with: platforms: linux/amd64,linux/arm64 push: true + provenance: false tags: ${{ steps.origin.outputs.tag }} cache-from: | type=registry,ref=${{ steps.upstream.outputs.ref }} From 4a859c6fcd237a5290e98706b48a7fd4cdb4f732 Mon Sep 17 00:00:00 2001 From: tcely Date: Wed, 29 Jan 2025 10:59:39 -0500 Subject: [PATCH 142/220] Use strings for outdoors paths --- tubesync/sync/youtube.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/youtube.py b/tubesync/sync/youtube.py index c0360ca9..958990f3 100644 --- a/tubesync/sync/youtube.py +++ b/tubesync/sync/youtube.py @@ -253,11 +253,12 @@ def hook(event): v_key = parse_qs(urlsplit(url).query).get('v').pop() temp_dir_parent = ytopts['paths']['temp'] temp_dir_prefix = f'{temp_dir_prefix}{v_key}-' - temp_dir = TemporaryDirectory(prefix=temp_dir_prefix,dir=temp_dir_parent) - (Path(temp_dir.name) / '.ignore').touch(exist_ok=True) + temp_dir_obj = TemporaryDirectory(prefix=temp_dir_prefix,dir=temp_dir_parent) + temp_dir_path = Path(temp_dir_obj.name) + (temp_dir_path / '.ignore').touch(exist_ok=True) ytopts['paths'].update({ - 'home': output_dir, - 'temp': temp_dir.name, + 'home': str(output_dir), + 'temp': str(temp_dir_path), }) codec_options = [] From 2b2da214eae2460dfc3f8995a51eb7623d635267 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 01:53:56 -0500 Subject: [PATCH 
143/220] Build `CommaSepChoiceField` from `CharField` This is a better fit, and we get to use the existing `db_type` configuration. --- tubesync/sync/fields.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 31889024..3702f7e9 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -48,12 +48,9 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: return { 'widget': ctx } # this is a database field! -class CommaSepChoiceField(models.Field): +class CommaSepChoiceField(models.CharField): "Implements comma-separated storage of lists" - # If 'text' isn't correct add the vendor override here. - _DB_TYPES = {} - def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): super().__init__(*args, **kwargs) self.separator = str(separator) @@ -70,9 +67,8 @@ def deconstruct(self): kwargs['possible_choices'] = self.possible_choices return name, path, args, kwargs - def db_type(self, connection): - value = self._DB_TYPES.get(connection.vendor, None) - return value if value is not None else 'text' + def get_internal_type(self): + return super().get_internal_type() def get_my_choices(self): choiceArray = [] @@ -116,6 +112,10 @@ def get_prep_value(self, value): else: return self.all_choice + def pre_save(self, model_instance, add=False): + obj = super().pre_save(model_instance, add) + return self.get_prep_value(obj.selected_choices) + def get_text_for_value(self, val): fval = [i for i in self.possible_choices if i[0] == val] if len(fval) <= 0: From 53514289ed175412fcac0e8728d257d6caf2af0a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 01:58:01 -0500 Subject: [PATCH 144/220] Add `max_length` required by `CharField` --- tubesync/sync/models.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 877b62e6..2ff176ca 100644 --- 
a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -118,6 +118,7 @@ class Source(models.Model): sponsorblock_categories = CommaSepChoiceField( _(''), + max_length=128, possible_choices=SponsorBlock_Category.choices, all_choice='all', allow_all=True, From edb9787d2fcdb7fe26ec679a42400f0fe9aa6c2b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 02:19:52 -0500 Subject: [PATCH 145/220] Add migration for `CommaSepChoiceField` --- ...28_alter_source_sponsorblock_categories.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) create mode 100644 tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py diff --git a/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py new file mode 100644 index 00000000..fc81ea8c --- /dev/null +++ b/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py @@ -0,0 +1,19 @@ +# Generated by Django 3.2.25 on 2025-01-31 07:10 + +from django.db import migrations +import sync.fields + + +class Migration(migrations.Migration): + + dependencies = [ + ('sync', '0027_alter_source_sponsorblock_categories'), + ] + + operations = [ + migrations.AlterField( + model_name='source', + name='sponsorblock_categories', + field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', max_length=128, possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), + ), + ] From 43dabe3da825c67dc0b28af324abe0fdb696af78 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 02:36:33 -0500 Subject: [PATCH 146/220] Update 0016_auto_20230214_2052.py Added the required `max_length` to the field. 
--- tubesync/sync/migrations/0016_auto_20230214_2052.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0016_auto_20230214_2052.py b/tubesync/sync/migrations/0016_auto_20230214_2052.py index ffba1952..f41f6b29 100644 --- a/tubesync/sync/migrations/0016_auto_20230214_2052.py +++ b/tubesync/sync/migrations/0016_auto_20230214_2052.py @@ -29,6 +29,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='source', name='sponsorblock_categories', - field=sync.models.CommaSepChoiceField(default='all', possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))), + field=sync.models.CommaSepChoiceField(default='all', max_length=128, possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))), ), ] From 779c266787aec7835e21dec1f91045d8e7df424b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 02:48:24 -0500 Subject: [PATCH 147/220] Update 0017_alter_source_sponsorblock_categories.py --- .../migrations/0017_alter_source_sponsorblock_categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py index cc9d9578..c099aafc 100644 --- a/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py +++ b/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): 
migrations.AlterField( model_name='source', name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', separator=''), + field=sync.fields.CommaSepChoiceField(default='all', max_length=128, help_text='Select the sponsorblocks you want to enforce', separator=''), ), ] From ee9bb8b83429c405b9a6196397bcdb0364ecf394 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 02:50:26 -0500 Subject: [PATCH 148/220] Update 0027_alter_source_sponsorblock_categories.py --- .../migrations/0027_alter_source_sponsorblock_categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py index 92fbc98a..a34e9745 100644 --- a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py +++ b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='source', name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), + field=sync.fields.CommaSepChoiceField(default='all', max_length=128, help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], 
verbose_name=''), ), ] From 5e4069448e31c35172788c1337abefe6fb5a330c Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 03:05:29 -0500 Subject: [PATCH 149/220] Convert string to selected_choices --- tubesync/sync/fields.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 3702f7e9..1c4bc165 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -114,7 +114,10 @@ def get_prep_value(self, value): def pre_save(self, model_instance, add=False): obj = super().pre_save(model_instance, add) - return self.get_prep_value(obj.selected_choices) + if isinstance(obj, str): + self.from_db_value(obj, None, None) + selected = self.selected_choices + return self.get_prep_value(selected) def get_text_for_value(self, val): fval = [i for i in self.possible_choices if i[0] == val] From f714c30377bd5d4cc17bc93bb3da0d3b90bd6618 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 03:58:35 -0500 Subject: [PATCH 150/220] Prevent my environment variables from causing failures --- tubesync/sync/tests.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/tests.py b/tubesync/sync/tests.py index 2704058f..1a9e9ed2 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -12,7 +12,7 @@ from urllib.parse import urlsplit from xml.etree import ElementTree from django.conf import settings -from django.test import TestCase, Client +from django.test import TestCase, Client, override_settings from django.utils import timezone from background_task.models import Task from .models import Source, Media @@ -1738,6 +1738,7 @@ def setUp(self): metadata='{}' ) + @override_settings(SHRINK_OLD_MEDIA_METADATA=False, SHRINK_NEW_MEDIA_METADATA=False) def test_metadata_20230629(self): self.media.metadata = all_test_metadata['20230629'] self.media.save() From 1117ba69dd46a8cbdc56d465e68d549f2102646b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 05:04:12 
-0500 Subject: [PATCH 151/220] Fix an unexpected keyword argument 'max_length' ``` TypeError: Field.__init__() got an unexpected keyword argument 'max_length' ``` --- tubesync/sync/fields.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 1c4bc165..eb37ae5d 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -91,7 +91,8 @@ def formfield(self, **kwargs): 'label': '', 'required': False} defaults.update(kwargs) - return super().formfield(**defaults) + # CharField calls with an extra 'max_length' that we must avoid. + return models.Field.formfield(self, **defaults) def from_db_value(self, value, expr, conn): if 0 == len(value) or value is None: From 713dedd854d053b233222e68e4dbe20750eebc3a Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 05:26:11 -0500 Subject: [PATCH 152/220] Go back to models.Field It didn't work any better that way. I'm still borrowing the `CharField` `db_type` support. --- tubesync/sync/fields.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index eb37ae5d..f22177af 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -48,7 +48,7 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: return { 'widget': ctx } # this is a database field! 
-class CommaSepChoiceField(models.CharField): +class CommaSepChoiceField(models.Field): "Implements comma-separated storage of lists" def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): @@ -68,7 +68,7 @@ def deconstruct(self): return name, path, args, kwargs def get_internal_type(self): - return super().get_internal_type() + return 'CharField' def get_my_choices(self): choiceArray = [] @@ -91,8 +91,7 @@ def formfield(self, **kwargs): 'label': '', 'required': False} defaults.update(kwargs) - # CharField calls with an extra 'max_length' that we must avoid. - return models.Field.formfield(self, **defaults) + return super().formfield(self, **defaults) def from_db_value(self, value, expr, conn): if 0 == len(value) or value is None: @@ -113,12 +112,11 @@ def get_prep_value(self, value): else: return self.all_choice - def pre_save(self, model_instance, add=False): + def pre_save(self, model_instance, add): obj = super().pre_save(model_instance, add) if isinstance(obj, str): self.from_db_value(obj, None, None) - selected = self.selected_choices - return self.get_prep_value(selected) + return self.get_prep_value(self.selected_choices) def get_text_for_value(self, val): fval = [i for i in self.possible_choices if i[0] == val] From 12e046d05897064290b8f070a46efda489ac69ce Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 07:57:02 -0500 Subject: [PATCH 153/220] Serialization changes --- tubesync/sync/fields.py | 74 ++++++++++++++++++++--------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index f22177af..f909731f 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -52,6 +52,7 @@ class CommaSepChoiceField(models.Field): "Implements comma-separated storage of lists" def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): + 
kwargs.setdefault('max_length', 128) super().__init__(*args, **kwargs) self.separator = str(separator) self.possible_choices = possible_choices @@ -59,6 +60,10 @@ def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice= self.allow_all = allow_all self.all_label = all_label self.all_choice = all_choice + self.choices = self.get_choices() + + def get_internal_type(self): + return 'CharField' def deconstruct(self): name, path, args, kwargs = super().deconstruct() @@ -67,56 +72,51 @@ def deconstruct(self): kwargs['possible_choices'] = self.possible_choices return name, path, args, kwargs - def get_internal_type(self): - return 'CharField' - - def get_my_choices(self): - choiceArray = [] + def get_choices(self, *args, **kwargs): + choice_list = list() if self.possible_choices is None: - return choiceArray + return choice_list if self.allow_all: - choiceArray.append((self.all_choice, _(self.all_label))) + choice_list.append((self.all_choice, _(self.all_label))) for t in self.possible_choices: - choiceArray.append(t) + choice_list.append(t) - return choiceArray + return choice_list def formfield(self, **kwargs): # This is a fairly standard way to set up some defaults # while letting the caller override them. 
- defaults = {'form_class': MultipleChoiceField, - 'choices': self.get_my_choices, - 'widget': CustomCheckboxSelectMultiple, - 'label': '', - 'required': False} + defaults = { + 'form_class': MultipleChoiceField, + 'choices_form_class': MultipleChoiceField, + 'widget': CustomCheckboxSelectMultiple, + 'label': '', + 'required': False, + } defaults.update(kwargs) return super().formfield(self, **defaults) - def from_db_value(self, value, expr, conn): - if 0 == len(value) or value is None: - self.selected_choices = [] - else: - self.selected_choices = value.split(self.separator) - - return self - def get_prep_value(self, value): - if value is None: - return "" - if not isinstance(value,list): - return "" - - if self.all_choice not in value: - return self.separator.join(value) - else: - return self.all_choice - - def pre_save(self, model_instance, add): - obj = super().pre_save(model_instance, add) - if isinstance(obj, str): - self.from_db_value(obj, None, None) - return self.get_prep_value(self.selected_choices) + value = super().get_prep_value(value) + return self.to_python(value) + + def to_python(self, value): + # string to list + value = super().to_python(value) + if isinstance(value, str) and len(value) > 0: + value = value.split(self.separator) + if not isinstance(value, list): + value = list() + + def value_to_string(self, obj): + # selected_choices to a string + if not isinstance(obj, self.__class__): + return '' + value = self.value_from_object(obj) + if obj.all_choice in value: + return obj.all_choice + return obj.separator.join(value) def get_text_for_value(self, val): fval = [i for i in self.possible_choices if i[0] == val] From cf3dfdb6084318e2bd6a364060cb9173058a61ac Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 08:10:41 -0500 Subject: [PATCH 154/220] fixup: return the value --- tubesync/sync/fields.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index f909731f..24f7b1b1 100644 --- 
a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -108,6 +108,7 @@ def to_python(self, value): value = value.split(self.separator) if not isinstance(value, list): value = list() + return value def value_to_string(self, obj): # selected_choices to a string From 6ab4dc528046b530ca9cda11db2f803c42e381fc Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 10:39:34 -0500 Subject: [PATCH 155/220] More fixes from testing --- tubesync/sync/fields.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 24f7b1b1..ae524708 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -60,7 +60,8 @@ def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice= self.allow_all = allow_all self.all_label = all_label self.all_choice = all_choice - self.choices = self.get_choices() + self.choices = None + # self.choices = self.get_all_choices() def get_internal_type(self): return 'CharField' @@ -72,7 +73,7 @@ def deconstruct(self): kwargs['possible_choices'] = self.possible_choices return name, path, args, kwargs - def get_choices(self, *args, **kwargs): + def get_all_choices(self): choice_list = list() if self.possible_choices is None: return choice_list @@ -89,13 +90,14 @@ def formfield(self, **kwargs): # while letting the caller override them. 
defaults = { 'form_class': MultipleChoiceField, - 'choices_form_class': MultipleChoiceField, + # 'choices_form_class': MultipleChoiceField, 'widget': CustomCheckboxSelectMultiple, + 'choices': self.get_all_choices(), 'label': '', 'required': False, } defaults.update(kwargs) - return super().formfield(self, **defaults) + return super().formfield(**defaults) def get_prep_value(self, value): value = super().get_prep_value(value) From 41b6a0c8875bd35f521c021fb07e634d4569db7b Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 11:03:56 -0500 Subject: [PATCH 156/220] More fixes --- tubesync/sync/fields.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index ae524708..f5e66594 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -101,7 +101,11 @@ def formfield(self, **kwargs): def get_prep_value(self, value): value = super().get_prep_value(value) - return self.to_python(value) + if not isinstance(value, list): + return value + if self.all_choice in value: + return self.all_choice + return self.separator.join(value) def to_python(self, value): # string to list @@ -114,8 +118,8 @@ def to_python(self, value): def value_to_string(self, obj): # selected_choices to a string - if not isinstance(obj, self.__class__): - return '' + # if not isinstance(obj, self.__class__): + # return '' value = self.value_from_object(obj) if obj.all_choice in value: return obj.all_choice From 19de389508ea6c2eb58f3d8bdc620fbca0edcbbd Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 11:12:09 -0500 Subject: [PATCH 157/220] Revert 0016_auto_20230214_2052.py --- tubesync/sync/migrations/0016_auto_20230214_2052.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0016_auto_20230214_2052.py b/tubesync/sync/migrations/0016_auto_20230214_2052.py index f41f6b29..ffba1952 100644 --- a/tubesync/sync/migrations/0016_auto_20230214_2052.py +++ 
b/tubesync/sync/migrations/0016_auto_20230214_2052.py @@ -29,6 +29,6 @@ class Migration(migrations.Migration): migrations.AddField( model_name='source', name='sponsorblock_categories', - field=sync.models.CommaSepChoiceField(default='all', max_length=128, possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))), + field=sync.models.CommaSepChoiceField(default='all', possible_choices=(('all', 'All'), ('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section'))), ), ] From 7affddf2d6499cc6f4b1e395c5cc59341af7d927 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 11:14:15 -0500 Subject: [PATCH 158/220] Revert 0017_alter_source_sponsorblock_categories.py --- .../migrations/0017_alter_source_sponsorblock_categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py index c099aafc..cc9d9578 100644 --- a/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py +++ b/tubesync/sync/migrations/0017_alter_source_sponsorblock_categories.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='source', name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', max_length=128, help_text='Select the sponsorblocks you want to enforce', separator=''), + field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', 
separator=''), ), ] From a60e7667d47204bec3c57e8db07e0b3d26e65b59 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 11:16:13 -0500 Subject: [PATCH 159/220] Revert 0027_alter_source_sponsorblock_categories.py --- .../migrations/0027_alter_source_sponsorblock_categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py index a34e9745..92fbc98a 100644 --- a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py +++ b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='source', name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', max_length=128, help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), + field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), ), ] From 0ca66b5895b10b6318d64614ccf74e8b49b4d9e7 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 15:18:08 -0500 Subject: [PATCH 160/220] Delete tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py --- ...28_alter_source_sponsorblock_categories.py | 19 ------------------- 1 file 
changed, 19 deletions(-) delete mode 100644 tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py diff --git a/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py deleted file mode 100644 index fc81ea8c..00000000 --- a/tubesync/sync/migrations/0028_alter_source_sponsorblock_categories.py +++ /dev/null @@ -1,19 +0,0 @@ -# Generated by Django 3.2.25 on 2025-01-31 07:10 - -from django.db import migrations -import sync.fields - - -class Migration(migrations.Migration): - - dependencies = [ - ('sync', '0027_alter_source_sponsorblock_categories'), - ] - - operations = [ - migrations.AlterField( - model_name='source', - name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', max_length=128, possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), - ), - ] From b95cacdf692a8323ee7ff96e9170b57ea020425e Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 23:08:34 -0500 Subject: [PATCH 161/220] Add files via upload --- tubesync/sync/fields.py | 147 ++++++++++++++++++++++++++-------------- 1 file changed, 98 insertions(+), 49 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index f5e66594..e42224e5 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -1,6 +1,8 @@ -from django.forms import MultipleChoiceField, CheckboxSelectMultiple, Field, TypedMultipleChoiceField -from django.db import models +from collections import namedtuple +from functools import lru_cache from typing import Any, Optional, Dict +from django import forms +from django.db import models from 
django.utils.translation import gettext_lazy as _ @@ -22,55 +24,104 @@ class SponsorBlock_Category(models.TextChoices): MUSIC_OFFTOPIC = 'music_offtopic', _( 'Non-Music Section' ) +CommaSepChoice = namedtuple( + 'CommaSepChoice', + ' '.join([ + 'allow_all', + 'all_choice', + 'all_label', + 'possible_choices', + 'selected_choices', + ]) +) + # this is a form field! -class CustomCheckboxSelectMultiple(CheckboxSelectMultiple): +class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple): template_name = 'widgets/checkbox_select.html' option_template_name = 'widgets/checkbox_option.html' + from common.logger import log def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: + # self.log.debug(f'wgc:1: {type(name)} {repr(name)}') + # self.log.debug(f'wgc:2: {type(value)} {repr(value)}') + # self.log.debug(f'wgc:3: {type(attrs)} {repr(attrs)}') + data = value + select_all = False + if isinstance(data, CommaSepChoice): + select_all = (data.allow_all and data.all_choice in data.selected_choices) + value = data.selected_choices ctx = super().get_context(name, value, attrs)['widget'] + # self.log.debug(f'wgc:4: {type(ctx)} {repr(ctx)}') ctx["multipleChoiceProperties"] = [] for _group, options, _index in ctx["optgroups"]: + # self.log.debug(f'wgc:5: {type(options)} {repr(options)}') for option in options: - if not isinstance(value,str) and not isinstance(value,list) and ( option["value"] in value.selected_choices or ( value.allow_all and value.all_choice in value.selected_choices ) ): - checked = True - else: - checked = False - - ctx["multipleChoiceProperties"].append({ - "template_name": option["template_name"], - "type": option["type"], - "value": option["value"], - "label": option["label"], - "name": option["name"], - "checked": checked}) + option["checked"] = option["selected"] or select_all + ctx["multipleChoiceProperties"].append(option) return { 'widget': ctx } +class Disabled: + pass + def value_to_string(self, obj): + # selected_choices to a 
string + # if not isinstance(obj, self.__class__): + # return '' + self.log.info("vts:1: %s %s", type(obj), repr(obj)) + value = self.value_from_object(obj) + self.log.info("vts:2: %s %s", type(value), repr(value)) + if obj.all_choice in value: + return obj.all_choice + return obj.separator.join(value) + + def get_text_for_value(self, val): + fval = [i for i in self.possible_choices if i[0] == val] + if len(fval) <= 0: + return [] + else: + return fval[0][1] + + # this is a database field! -class CommaSepChoiceField(models.Field): +class CommaSepChoiceField(models.CharField): "Implements comma-separated storage of lists" + widget = CustomCheckboxSelectMultiple + from common.logger import log + def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): kwargs.setdefault('max_length', 128) super().__init__(*args, **kwargs) self.separator = str(separator) self.possible_choices = possible_choices - self.selected_choices = [] + self.selected_choices = list() self.allow_all = allow_all self.all_label = all_label self.all_choice = all_choice - self.choices = None - # self.choices = self.get_all_choices() + self.choices = self.get_all_choices() + self.validators.clear() + + + # Override these functions to prevent unwanted behaviors + def to_python(self, value): + self.log.debug(f'to_py:1: {type(value)} {repr(value)}') + return value def get_internal_type(self): - return 'CharField' + return super().get_internal_type() + def deconstruct(self): name, path, args, kwargs = super().deconstruct() if ',' != self.separator: kwargs['separator'] = self.separator kwargs['possible_choices'] = self.possible_choices + if self.allow_all: + kwargs['allow_all'] = self.allow_all + if self.all_choice: + kwargs['all_choice'] = self.all_choice + if 'All' != self.all_label: + kwargs['all_label'] = self.all_label return name, path, args, kwargs def get_all_choices(self): @@ -89,9 +140,9 @@ def formfield(self, **kwargs): # This 
is a fairly standard way to set up some defaults # while letting the caller override them. defaults = { - 'form_class': MultipleChoiceField, - # 'choices_form_class': MultipleChoiceField, - 'widget': CustomCheckboxSelectMultiple, + 'form_class': forms.MultipleChoiceField, + # 'choices_form_class': forms.MultipleChoiceField, + 'widget': self.widget, 'choices': self.get_all_choices(), 'label': '', 'required': False, @@ -99,35 +150,33 @@ def formfield(self, **kwargs): defaults.update(kwargs) return super().formfield(**defaults) - def get_prep_value(self, value): - value = super().get_prep_value(value) - if not isinstance(value, list): - return value - if self.all_choice in value: - return self.all_choice - return self.separator.join(value) - - def to_python(self, value): - # string to list - value = super().to_python(value) + @lru_cache(maxsize=10) + def from_db_value(self, value, expression, connection): + self.log.debug(f'fdbv:1: {type(value)} {repr(value)}') if isinstance(value, str) and len(value) > 0: value = value.split(self.separator) if not isinstance(value, list): value = list() - return value + self.selected_choices = value + return CommaSepChoice( + allow_all=self.allow_all, + all_choice=self.all_choice, + all_label=self.all_label, + possible_choices=self.choices, + selected_choices=self.selected_choices, + ) + + def get_prep_value(self, value): + self.log.debug(f'gpv:1: {type(value)} {repr(value)}') + s_value = super().get_prep_value(value) + self.log.debug(f'gpv:2: {type(s_value)} {repr(s_value)}') + data = value + if isinstance(value, CommaSepChoice): + value = value.selected_choices + if not isinstance(value, list): + return '' + if data.all_choice in value: + return data.all_choice + return data.separator.join(value) - def value_to_string(self, obj): - # selected_choices to a string - # if not isinstance(obj, self.__class__): - # return '' - value = self.value_from_object(obj) - if obj.all_choice in value: - return obj.all_choice - return 
obj.separator.join(value) - def get_text_for_value(self, val): - fval = [i for i in self.possible_choices if i[0] == val] - if len(fval) <= 0: - return [] - else: - return fval[0][1] From 15e33036367ee3dc524a77f6c5478678f5eb77fd Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 23:33:16 -0500 Subject: [PATCH 162/220] Cleanup the uploaded file and document hard won knowledge --- tubesync/sync/fields.py | 71 ++++++++++++++++++----------------------- 1 file changed, 31 insertions(+), 40 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index e42224e5..441474e2 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -39,52 +39,32 @@ class SponsorBlock_Category(models.TextChoices): class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple): template_name = 'widgets/checkbox_select.html' option_template_name = 'widgets/checkbox_option.html' - from common.logger import log def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: - # self.log.debug(f'wgc:1: {type(name)} {repr(name)}') - # self.log.debug(f'wgc:2: {type(value)} {repr(value)}') - # self.log.debug(f'wgc:3: {type(attrs)} {repr(attrs)}') data = value select_all = False if isinstance(data, CommaSepChoice): select_all = (data.allow_all and data.all_choice in data.selected_choices) value = data.selected_choices ctx = super().get_context(name, value, attrs)['widget'] - # self.log.debug(f'wgc:4: {type(ctx)} {repr(ctx)}') - ctx["multipleChoiceProperties"] = [] + ctx["multipleChoiceProperties"] = list() for _group, options, _index in ctx["optgroups"]: - # self.log.debug(f'wgc:5: {type(options)} {repr(options)}') + # `options` is a list containing a single dictionary. + # That naming was a bit misleading, + # I may change it to `option_list`, or a better alternative, later. for option in options: + # Using `checked` instead of `selected` here is sub-optimal. 
option["checked"] = option["selected"] or select_all ctx["multipleChoiceProperties"].append(option) return { 'widget': ctx } -class Disabled: - pass - def value_to_string(self, obj): - # selected_choices to a string - # if not isinstance(obj, self.__class__): - # return '' - self.log.info("vts:1: %s %s", type(obj), repr(obj)) - value = self.value_from_object(obj) - self.log.info("vts:2: %s %s", type(value), repr(value)) - if obj.all_choice in value: - return obj.all_choice - return obj.separator.join(value) - - def get_text_for_value(self, val): - fval = [i for i in self.possible_choices if i[0] == val] - if len(fval) <= 0: - return [] - else: - return fval[0][1] - # this is a database field! class CommaSepChoiceField(models.CharField): - "Implements comma-separated storage of lists" + ''' + Implements comma-separated storage of lists + ''' widget = CustomCheckboxSelectMultiple from common.logger import log @@ -111,6 +91,7 @@ def get_internal_type(self): return super().get_internal_type() + # standard functions for this class def deconstruct(self): name, path, args, kwargs = super().deconstruct() if ',' != self.separator: @@ -124,18 +105,6 @@ def deconstruct(self): kwargs['all_label'] = self.all_label return name, path, args, kwargs - def get_all_choices(self): - choice_list = list() - if self.possible_choices is None: - return choice_list - if self.allow_all: - choice_list.append((self.all_choice, _(self.all_label))) - - for t in self.possible_choices: - choice_list.append(t) - - return choice_list - def formfield(self, **kwargs): # This is a fairly standard way to set up some defaults # while letting the caller override them. @@ -152,6 +121,13 @@ def formfield(self, **kwargs): @lru_cache(maxsize=10) def from_db_value(self, value, expression, connection): + ''' + Create a data structure to be used in Python code. + + This is called quite often with the same input, + because the database value doesn't change often. 
+ So, it's being cached to prevent excessive logging. + ''' self.log.debug(f'fdbv:1: {type(value)} {repr(value)}') if isinstance(value, str) and len(value) > 0: value = value.split(self.separator) @@ -167,6 +143,9 @@ def from_db_value(self, value, expression, connection): ) def get_prep_value(self, value): + ''' + Create a value to be stored in the database. + ''' self.log.debug(f'gpv:1: {type(value)} {repr(value)}') s_value = super().get_prep_value(value) self.log.debug(f'gpv:2: {type(s_value)} {repr(s_value)}') @@ -179,4 +158,16 @@ def get_prep_value(self, value): return data.all_choice return data.separator.join(value) + # extra functions not used by any parent classes + def get_all_choices(self): + choice_list = list() + if self.possible_choices is None: + return choice_list + if self.allow_all: + choice_list.append((self.all_choice, _(self.all_label))) + + for t in self.possible_choices: + choice_list.append(t) + + return choice_list From 3f2d16463443513cdf7de213704651ae2feb2a98 Mon Sep 17 00:00:00 2001 From: tcely Date: Fri, 31 Jan 2025 23:47:18 -0500 Subject: [PATCH 163/220] Add `separator` for use by `get_prep_value` --- tubesync/sync/fields.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 441474e2..231aa0c2 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -32,6 +32,7 @@ class SponsorBlock_Category(models.TextChoices): 'all_label', 'possible_choices', 'selected_choices', + 'separator', ]) ) @@ -140,6 +141,7 @@ def from_db_value(self, value, expression, connection): all_label=self.all_label, possible_choices=self.choices, selected_choices=self.selected_choices, + separator=self.separator, ) def get_prep_value(self, value): From ad807511ecb70d4e87de4a2c6bdacef40bd10c2a Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 00:25:27 -0500 Subject: [PATCH 164/220] `t` is a poor name for the choice --- tubesync/sync/fields.py | 4 ++-- 1 file changed, 2 insertions(+), 2 
deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 231aa0c2..2056e26e 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -168,8 +168,8 @@ def get_all_choices(self): if self.allow_all: choice_list.append((self.all_choice, _(self.all_label))) - for t in self.possible_choices: - choice_list.append(t) + for choice in self.possible_choices: + choice_list.append(choice) return choice_list From ccf24e025818bc840117d027f59d90a579a3de73 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 01:49:58 -0500 Subject: [PATCH 165/220] Matched up names can be passed as a filtered dictionary --- tubesync/sync/fields.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 2056e26e..34c94b63 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -135,14 +135,8 @@ def from_db_value(self, value, expression, connection): if not isinstance(value, list): value = list() self.selected_choices = value - return CommaSepChoice( - allow_all=self.allow_all, - all_choice=self.all_choice, - all_label=self.all_label, - possible_choices=self.choices, - selected_choices=self.selected_choices, - separator=self.separator, - ) + args_dict = {key: self.__dict__[key] for key in CommaSepChoice._fields} + return CommaSepChoice(**args_dict) def get_prep_value(self, value): ''' From eaea061e2713d690afe64a3d2abdb48dbb516b3e Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 02:03:04 -0500 Subject: [PATCH 166/220] Add a comment about the compact `formfield` args method --- tubesync/sync/fields.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 34c94b63..c13a04eb 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -119,6 +119,11 @@ def formfield(self, **kwargs): } defaults.update(kwargs) return super().formfield(**defaults) + # This is a more compact way to do the same thing + 
# return super().formfield(**{ + # 'form_class': forms.MultipleChoiceField, + # **kwargs, + # }) @lru_cache(maxsize=10) def from_db_value(self, value, expression, connection): From 20ca17cd2e43fa40501bafa7247a77f5abad10a7 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 02:09:27 -0500 Subject: [PATCH 167/220] Add functions that may help with the validation problem --- tubesync/sync/fields.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index c13a04eb..a61c0ffa 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -91,6 +91,10 @@ def to_python(self, value): def get_internal_type(self): return super().get_internal_type() + # maybe useful? + def value_to_string(self, obj): + return self.value_from_object(obj) + # standard functions for this class def deconstruct(self): @@ -106,6 +110,15 @@ def deconstruct(self): kwargs['all_label'] = self.all_label return name, path, args, kwargs + # maybe useful? + def check(self, **kwargs): + errors = super().check(**kwargs) + return eerrors + + # maybe useful? + def validate(self, value, model_instance): + super().validate(value, model_instance) + def formfield(self, **kwargs): # This is a fairly standard way to set up some defaults # while letting the caller override them. From bac635563e859118f4208f369fa45d610b6a6740 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 03:10:56 -0500 Subject: [PATCH 168/220] An extra 'e' has appeared --- tubesync/sync/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index a61c0ffa..50fdd2fe 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -113,7 +113,7 @@ def deconstruct(self): # maybe useful? def check(self, **kwargs): errors = super().check(**kwargs) - return eerrors + return errors # maybe useful? 
def validate(self, value, model_instance): From b18d19fe6f0967e275541c8dec3c8867b5e2cf98 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 12:21:32 -0500 Subject: [PATCH 169/220] Provide default values for `CommaSepChoice` --- tubesync/sync/fields.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 50fdd2fe..cdde8960 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -25,15 +25,22 @@ class SponsorBlock_Category(models.TextChoices): CommaSepChoice = namedtuple( - 'CommaSepChoice', - ' '.join([ + 'CommaSepChoice', [ 'allow_all', 'all_choice', 'all_label', 'possible_choices', 'selected_choices', 'separator', - ]) + ], + defaults = ( + False, + None, + 'All', + list(), + list(), + ',', + ), ) # this is a form field! From 5f3c60e4cc19d6f8cf814a73008b7ec7479516e4 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 12:33:53 -0500 Subject: [PATCH 170/220] Remove unused import: `Optional` --- tubesync/sync/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index cdde8960..56bb764e 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -1,6 +1,6 @@ from collections import namedtuple from functools import lru_cache -from typing import Any, Optional, Dict +from typing import Any, Dict from django import forms from django.db import models from django.utils.translation import gettext_lazy as _ From 526931de5ed1308bc26e00b2e2344ef5d9b7c095 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 13:21:40 -0500 Subject: [PATCH 171/220] Replace `option.checked` with `option.selected` The context already sets `selected` appropriately, which value that means for the HTML is a detail for the template to handle. 
--- tubesync/sync/templates/widgets/checkbox_option.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/templates/widgets/checkbox_option.html b/tubesync/sync/templates/widgets/checkbox_option.html index 06a6723e..db32a457 100644 --- a/tubesync/sync/templates/widgets/checkbox_option.html +++ b/tubesync/sync/templates/widgets/checkbox_option.html @@ -2,6 +2,6 @@ --> \ No newline at end of file + From f60b0f7fbcc858cf1ee3cb5cc41bfa663a1c8bea Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 13:23:17 -0500 Subject: [PATCH 172/220] Stop setting the additional `checked` key --- tubesync/sync/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 56bb764e..229baa13 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -62,7 +62,7 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: # I may change it to `option_list`, or a better alternative, later. for option in options: # Using `checked` instead of `selected` here is sub-optimal. - option["checked"] = option["selected"] or select_all + option["selected"] = option["selected"] or select_all ctx["multipleChoiceProperties"].append(option) return { 'widget': ctx } From 9bc1a84d4714e7ffa0576ae3e48d85d033b24455 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 13:43:53 -0500 Subject: [PATCH 173/220] Use bitwise or As can be seen below, `True`/`False` are bits we can manipulate using logical operators. 
``` >>> [(t_or_f.bit_length(), t_or_f.to_bytes()) for t_or_f in (False, True)] [(0, b'\x00'), (1, b'\x01')] >>> ``` --- tubesync/sync/fields.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 229baa13..07862936 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -62,7 +62,7 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: # I may change it to `option_list`, or a better alternative, later. for option in options: # Using `checked` instead of `selected` here is sub-optimal. - option["selected"] = option["selected"] or select_all + option["selected"] |= select_all ctx["multipleChoiceProperties"].append(option) return { 'widget': ctx } From b2ec287121a59f89e27815464cb2f2e7acd8be8b Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 14:03:42 -0500 Subject: [PATCH 174/220] Remove the outdated comment --- tubesync/sync/fields.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 07862936..e61e6f1b 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -61,7 +61,6 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: # That naming was a bit misleading, # I may change it to `option_list`, or a better alternative, later. for option in options: - # Using `checked` instead of `selected` here is sub-optimal. 
option["selected"] |= select_all ctx["multipleChoiceProperties"].append(option) From 0e9b473a942d357e76e08880ad15c905a59af6d3 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 14:08:52 -0500 Subject: [PATCH 175/220] Clarify the `options` variables --- tubesync/sync/fields.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index e61e6f1b..35411b1f 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -55,12 +55,10 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: select_all = (data.allow_all and data.all_choice in data.selected_choices) value = data.selected_choices ctx = super().get_context(name, value, attrs)['widget'] + options = ctx["optgroups"] ctx["multipleChoiceProperties"] = list() - for _group, options, _index in ctx["optgroups"]: - # `options` is a list containing a single dictionary. - # That naming was a bit misleading, - # I may change it to `option_list`, or a better alternative, later. 
- for option in options: + for _group, single_option_list, _index in options: + for option in single_option_list: option["selected"] |= select_all ctx["multipleChoiceProperties"].append(option) From 8494edb92a1b4559cb0be9026b98905a8fa5dbbe Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 14:35:04 -0500 Subject: [PATCH 176/220] Set `self.form_class` once --- tubesync/sync/fields.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 35411b1f..92c5fb45 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -71,6 +71,7 @@ class CommaSepChoiceField(models.CharField): Implements comma-separated storage of lists ''' + form_class = forms.MultipleChoiceField widget = CustomCheckboxSelectMultiple from common.logger import log @@ -127,8 +128,8 @@ def formfield(self, **kwargs): # This is a fairly standard way to set up some defaults # while letting the caller override them. defaults = { - 'form_class': forms.MultipleChoiceField, - # 'choices_form_class': forms.MultipleChoiceField, + 'form_class': self.form_class, + # 'choices_form_class': self.form_class, 'widget': self.widget, 'choices': self.get_all_choices(), 'label': '', @@ -138,7 +139,7 @@ def formfield(self, **kwargs): return super().formfield(**defaults) # This is a more compact way to do the same thing # return super().formfield(**{ - # 'form_class': forms.MultipleChoiceField, + # 'form_class': self.form_class, # **kwargs, # }) From a6b84b49b79806f94450bd13e120d06d11857e95 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 15:12:34 -0500 Subject: [PATCH 177/220] Preserve access to context --- tubesync/sync/fields.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 92c5fb45..83b7625b 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -47,22 +47,30 @@ class 
SponsorBlock_Category(models.TextChoices): class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple): template_name = 'widgets/checkbox_select.html' option_template_name = 'widgets/checkbox_option.html' + from common.logger import log + + def format_value(self, value): + self.log.debug(f'widget_format_v:1: {type(value)} {repr(value)}') + return super().format_value(value) def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: data = value select_all = False if isinstance(data, CommaSepChoice): select_all = (data.allow_all and data.all_choice in data.selected_choices) - value = data.selected_choices - ctx = super().get_context(name, value, attrs)['widget'] - options = ctx["optgroups"] - ctx["multipleChoiceProperties"] = list() + value = list(data.selected_choices) + context = super().get_context(name, value, attrs) + widget = context['widget'] + options = widget['optgroups'] + _value = widget['multipleChoiceProperties'] if 'multipleChoiceProperties' in widget else (None, 'Key not in widget') + self.log.debug(f'widget_get_c:1: {type(_value)} {repr(_value)}') + widget['multipleChoiceProperties'] = list() for _group, single_option_list, _index in options: for option in single_option_list: - option["selected"] |= select_all - ctx["multipleChoiceProperties"].append(option) + option['selected'] |= select_all + widget['multipleChoiceProperties'].append(option) - return { 'widget': ctx } + return { 'widget': widget } # this is a database field! 
From 44c0fba2e7450ba8b4f88aef8a0935554400534b Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 15:37:41 -0500 Subject: [PATCH 178/220] Set choices for the ancestor classes --- tubesync/sync/fields.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 83b7625b..f3dba3ae 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -85,14 +85,15 @@ class CommaSepChoiceField(models.CharField): def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): kwargs.setdefault('max_length', 128) - super().__init__(*args, **kwargs) + kwargs.setdefault('choices', None) self.separator = str(separator) - self.possible_choices = possible_choices + self.possible_choices = possible_choices or choices self.selected_choices = list() self.allow_all = allow_all self.all_label = all_label self.all_choice = all_choice self.choices = self.get_all_choices() + super().__init__(*args, **kwargs) self.validators.clear() @@ -112,6 +113,7 @@ def value_to_string(self, obj): # standard functions for this class def deconstruct(self): name, path, args, kwargs = super().deconstruct() + del kwargs['choices'] if ',' != self.separator: kwargs['separator'] = self.separator kwargs['possible_choices'] = self.possible_choices From c994a1625368df74361b79179dd3bf15e7aaf9c9 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 16:21:50 -0500 Subject: [PATCH 179/220] Cleanup from testing --- tubesync/sync/fields.py | 14 ++------------ 1 file changed, 2 insertions(+), 12 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index f3dba3ae..2dd14619 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -47,6 +47,7 @@ class SponsorBlock_Category(models.TextChoices): class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple): template_name = 'widgets/checkbox_select.html' option_template_name = 
'widgets/checkbox_option.html' + # checked_attribute = {'checked': True, 'selected': True} from common.logger import log def format_value(self, value): @@ -85,7 +86,6 @@ class CommaSepChoiceField(models.CharField): def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice="", all_label="All", allow_all=False, **kwargs): kwargs.setdefault('max_length', 128) - kwargs.setdefault('choices', None) self.separator = str(separator) self.possible_choices = possible_choices or choices self.selected_choices = list() @@ -99,16 +99,11 @@ def __init__(self, *args, separator=",", possible_choices=(("","")), all_choice= # Override these functions to prevent unwanted behaviors def to_python(self, value): - self.log.debug(f'to_py:1: {type(value)} {repr(value)}') return value def get_internal_type(self): return super().get_internal_type() - # maybe useful? - def value_to_string(self, obj): - return self.value_from_object(obj) - # standard functions for this class def deconstruct(self): @@ -125,11 +120,6 @@ def deconstruct(self): kwargs['all_label'] = self.all_label return name, path, args, kwargs - # maybe useful? - def check(self, **kwargs): - errors = super().check(**kwargs) - return errors - # maybe useful? def validate(self, value, model_instance): super().validate(value, model_instance) @@ -141,7 +131,7 @@ def formfield(self, **kwargs): 'form_class': self.form_class, # 'choices_form_class': self.form_class, 'widget': self.widget, - 'choices': self.get_all_choices(), + 'choices': self.choices, 'label': '', 'required': False, } From 430b096fc8b5c14ec863e2bdb1571895dc8a55d8 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 16:31:30 -0500 Subject: [PATCH 180/220] Handle `self.choices` better Arrange for it to not be there, rather than removing it later. 
--- tubesync/sync/fields.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 2dd14619..9f5a3088 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -107,8 +107,10 @@ def get_internal_type(self): # standard functions for this class def deconstruct(self): + # set it back to the default for models.Field + # this way it is never in the returned values + self.choices = None name, path, args, kwargs = super().deconstruct() - del kwargs['choices'] if ',' != self.separator: kwargs['separator'] = self.separator kwargs['possible_choices'] = self.possible_choices From 280715e53e9099c78c553016828980940d6d317b Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 16:33:15 -0500 Subject: [PATCH 181/220] Restore `self.choices` after clearing it --- tubesync/sync/fields.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 9f5a3088..4c179a97 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -111,6 +111,7 @@ def deconstruct(self): # this way it is never in the returned values self.choices = None name, path, args, kwargs = super().deconstruct() + self.choices = self.get_all_choices() if ',' != self.separator: kwargs['separator'] = self.separator kwargs['possible_choices'] = self.possible_choices From 4aa7a09ef07cd45b0235746cd7a6604afe80b281 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 17:07:55 -0500 Subject: [PATCH 182/220] Do not call `CharField.formfield()` --- tubesync/sync/fields.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 4c179a97..b6bc644d 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -2,7 +2,7 @@ from functools import lru_cache from typing import Any, Dict from django import forms -from django.db import models +from django.db import connection, models from 
django.utils.translation import gettext_lazy as _ @@ -138,8 +138,13 @@ def formfield(self, **kwargs): 'label': '', 'required': False, } + # Keep the part from CharField we want, + # then call Field to skip the 'max_length' entry. + db_empty_string_as_null = connection.features.interprets_empty_strings_as_nulls + if self.null and not db_empty_string_as_null: + defaults['empty_value'] = None defaults.update(kwargs) - return super().formfield(**defaults) + return super(super(), self).formfield(**defaults) # This is a more compact way to do the same thing # return super().formfield(**{ # 'form_class': self.form_class, From 5ccda92be1b0906b76adff0e0141aebd1bce43cf Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 17:10:18 -0500 Subject: [PATCH 183/220] Use `grandparent` variable to make things clearer --- tubesync/sync/fields.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index b6bc644d..171ae67f 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -144,7 +144,8 @@ def formfield(self, **kwargs): if self.null and not db_empty_string_as_null: defaults['empty_value'] = None defaults.update(kwargs) - return super(super(), self).formfield(**defaults) + grandparent = super(super(), self) + return grandparent.formfield(**defaults) # This is a more compact way to do the same thing # return super().formfield(**{ # 'form_class': self.form_class, From 0a29a053e8e0cf72815e034fcc693076a6614fbf Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 18:23:51 -0500 Subject: [PATCH 184/220] Fixes from testing --- tubesync/sync/fields.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 171ae67f..8bc814e3 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -134,7 +134,7 @@ def formfield(self, **kwargs): 'form_class': self.form_class, # 'choices_form_class': self.form_class, 'widget': 
self.widget, - 'choices': self.choices, + 'choices': self.get_all_choices, 'label': '', 'required': False, } @@ -144,8 +144,7 @@ def formfield(self, **kwargs): if self.null and not db_empty_string_as_null: defaults['empty_value'] = None defaults.update(kwargs) - grandparent = super(super(), self) - return grandparent.formfield(**defaults) + return models.Field.formfield(self, **defaults) # This is a more compact way to do the same thing # return super().formfield(**{ # 'form_class': self.form_class, @@ -178,8 +177,12 @@ def get_prep_value(self, value): s_value = super().get_prep_value(value) self.log.debug(f'gpv:2: {type(s_value)} {repr(s_value)}') data = value - if isinstance(value, CommaSepChoice): - value = value.selected_choices + if not isinstance(data, CommaSepChoice): + # The data was lost; we can regenerate it. + args_dict = {key: self.__dict__[key] for key in CommaSepChoice._fields} + args_dict['selected_choices'] = list(value) + data = CommaSepChoice(**args_dict) + value = data.selected_choices if not isinstance(value, list): return '' if data.all_choice in value: From f08ef11f21ca57301993f00e2063ea84f7650806 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 19:15:39 -0500 Subject: [PATCH 185/220] Cleanup temporary logging --- tubesync/sync/fields.py | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/tubesync/sync/fields.py b/tubesync/sync/fields.py index 8bc814e3..a18518a1 100644 --- a/tubesync/sync/fields.py +++ b/tubesync/sync/fields.py @@ -47,12 +47,8 @@ class SponsorBlock_Category(models.TextChoices): class CustomCheckboxSelectMultiple(forms.CheckboxSelectMultiple): template_name = 'widgets/checkbox_select.html' option_template_name = 'widgets/checkbox_option.html' + # perhaps set the 'selected' attribute too? 
# checked_attribute = {'checked': True, 'selected': True} - from common.logger import log - - def format_value(self, value): - self.log.debug(f'widget_format_v:1: {type(value)} {repr(value)}') - return super().format_value(value) def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: data = value @@ -63,8 +59,7 @@ def get_context(self, name: str, value: Any, attrs) -> Dict[str, Any]: context = super().get_context(name, value, attrs) widget = context['widget'] options = widget['optgroups'] - _value = widget['multipleChoiceProperties'] if 'multipleChoiceProperties' in widget else (None, 'Key not in widget') - self.log.debug(f'widget_get_c:1: {type(_value)} {repr(_value)}') + # This is a new key in widget widget['multipleChoiceProperties'] = list() for _group, single_option_list, _index in options: for option in single_option_list: @@ -123,10 +118,6 @@ def deconstruct(self): kwargs['all_label'] = self.all_label return name, path, args, kwargs - # maybe useful? - def validate(self, value, model_instance): - super().validate(value, model_instance) - def formfield(self, **kwargs): # This is a fairly standard way to set up some defaults # while letting the caller override them. @@ -134,6 +125,7 @@ def formfield(self, **kwargs): 'form_class': self.form_class, # 'choices_form_class': self.form_class, 'widget': self.widget, + # use a callable for choices 'choices': self.get_all_choices, 'label': '', 'required': False, @@ -173,9 +165,6 @@ def get_prep_value(self, value): ''' Create a value to be stored in the database. ''' - self.log.debug(f'gpv:1: {type(value)} {repr(value)}') - s_value = super().get_prep_value(value) - self.log.debug(f'gpv:2: {type(s_value)} {repr(s_value)}') data = value if not isinstance(data, CommaSepChoice): # The data was lost; we can regenerate it. 
@@ -183,6 +172,10 @@ def get_prep_value(self, value): args_dict['selected_choices'] = list(value) data = CommaSepChoice(**args_dict) value = data.selected_choices + s_value = super().get_prep_value(value) + if set(s_value) != set(value): + self.log.warn(f'CommaSepChoiceField:get_prep_value: values did not match. ' + f'CommaSepChoiceField({value}) versus CharField({s_value})') if not isinstance(value, list): return '' if data.all_choice in value: From 90b9da00ebba9b82ee01ecee436f3cd44954951a Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 20:29:41 -0500 Subject: [PATCH 186/220] Update 0027_alter_source_sponsorblock_categories.py --- .../migrations/0027_alter_source_sponsorblock_categories.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py index 92fbc98a..c81b8e72 100644 --- a/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py +++ b/tubesync/sync/migrations/0027_alter_source_sponsorblock_categories.py @@ -14,6 +14,6 @@ class Migration(migrations.Migration): migrations.AlterField( model_name='source', name='sponsorblock_categories', - field=sync.fields.CommaSepChoiceField(default='all', help_text='Select the sponsorblocks you want to enforce', possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), + field=sync.fields.CommaSepChoiceField(all_choice='all', all_label='(All Categories)', allow_all=True, default='all', help_text='Select the SponsorBlock categories that you wish to be removed from downloaded videos.', max_length=128, possible_choices=[('sponsor', 'Sponsor'), ('intro', 'Intermission/Intro Animation'), ('outro', 
'Endcards/Credits'), ('selfpromo', 'Unpaid/Self Promotion'), ('preview', 'Preview/Recap'), ('filler', 'Filler Tangent'), ('interaction', 'Interaction Reminder'), ('music_offtopic', 'Non-Music Section')], verbose_name=''), ), ] From 2c9a80ec10ef3b4e0e8718f6040a3000bc4ccb3e Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 20:36:21 -0500 Subject: [PATCH 187/220] Update models.py --- tubesync/sync/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/models.py b/tubesync/sync/models.py index 2ff176ca..2daeb094 100644 --- a/tubesync/sync/models.py +++ b/tubesync/sync/models.py @@ -122,9 +122,9 @@ class Source(models.Model): possible_choices=SponsorBlock_Category.choices, all_choice='all', allow_all=True, - all_label='(all options)', + all_label='(All Categories)', default='all', - help_text=_('Select the sponsorblocks you want to enforce') + help_text=_('Select the SponsorBlock categories that you wish to be removed from downloaded videos.') ) embed_metadata = models.BooleanField( _('embed metadata'), From f7840159fdafbf225864ea34bf29fa650c7ad047 Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 23:40:10 -0500 Subject: [PATCH 188/220] Add tests for `sponsorblock_categories` --- tubesync/sync/tests.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/tubesync/sync/tests.py b/tubesync/sync/tests.py index 1a9e9ed2..5f285de5 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -190,6 +190,7 @@ def test_source(self): 'prefer_60fps': False, 'prefer_hdr': False, 'fallback': 'f', + 'sponsorblock_categories': ('preview', 'sponsor',), 'sub_langs': 'en', } response = c.post('/source-add', data) @@ -203,6 +204,9 @@ def test_source(self): source_uuid = path_parts[1] source = Source.objects.get(pk=source_uuid) self.assertEqual(str(source.pk), source_uuid) + # Check that the SponsorBlock categories were saved + self.assertEqual(source.sponsorblock_categories.selected_choices, + ['sponsor', 
'preview']) # Check a task was created to index the media for the new source source_uuid = str(source.pk) task = Task.objects.get_task('sync.tasks.index_source_task', @@ -215,6 +219,13 @@ def test_source(self): # Check the source detail page loads response = c.get(f'/source/{source_uuid}') self.assertEqual(response.status_code, 200) + # save and refresh the Source + source.refresh_from_db() + source.save() + source.refresh_from_db() + # Check that the SponsorBlock categories remain saved + self.assertEqual(source.sponsorblock_categories.selected_choices, + ['sponsor', 'preview']) # Update the source key data = { 'source_type': Source.SOURCE_TYPE_YOUTUBE_CHANNEL, @@ -234,6 +245,7 @@ def test_source(self): 'prefer_60fps': False, 'prefer_hdr': False, 'fallback': Source.FALLBACK_FAIL, + 'sponsorblock_categories': ('preview', 'sponsor',), 'sub_langs': 'en', } response = c.post(f'/source-update/{source_uuid}', data) @@ -247,6 +259,9 @@ def test_source(self): source_uuid = path_parts[1] source = Source.objects.get(pk=source_uuid) self.assertEqual(source.key, 'updatedkey') + # Check that the SponsorBlock categories remain saved + self.assertEqual(source.sponsorblock_categories.selected_choices, + ['sponsor', 'preview']) # Update the source index schedule which should recreate the scheduled task data = { 'source_type': Source.SOURCE_TYPE_YOUTUBE_CHANNEL, From cb8d0a051f0afefccf50595a90c578e28255154c Mon Sep 17 00:00:00 2001 From: tcely Date: Sat, 1 Feb 2025 23:54:01 -0500 Subject: [PATCH 189/220] Use variables for tests The order of the submitted values is preserved. 
--- tubesync/sync/tests.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tubesync/sync/tests.py b/tubesync/sync/tests.py index 5f285de5..03894819 100644 --- a/tubesync/sync/tests.py +++ b/tubesync/sync/tests.py @@ -172,6 +172,8 @@ def test_source(self): response = c.get('/source-add') self.assertEqual(response.status_code, 200) # Create a new source + data_categories = ('sponsor', 'preview',) + exected_categories = ['sponsor', 'preview'] data = { 'source_type': 'c', 'key': 'testkey', @@ -190,7 +192,7 @@ def test_source(self): 'prefer_60fps': False, 'prefer_hdr': False, 'fallback': 'f', - 'sponsorblock_categories': ('preview', 'sponsor',), + 'sponsorblock_categories': data_categories, 'sub_langs': 'en', } response = c.post('/source-add', data) @@ -206,7 +208,7 @@ def test_source(self): self.assertEqual(str(source.pk), source_uuid) # Check that the SponsorBlock categories were saved self.assertEqual(source.sponsorblock_categories.selected_choices, - ['sponsor', 'preview']) + exected_categories) # Check a task was created to index the media for the new source source_uuid = str(source.pk) task = Task.objects.get_task('sync.tasks.index_source_task', @@ -225,7 +227,7 @@ def test_source(self): source.refresh_from_db() # Check that the SponsorBlock categories remain saved self.assertEqual(source.sponsorblock_categories.selected_choices, - ['sponsor', 'preview']) + exected_categories) # Update the source key data = { 'source_type': Source.SOURCE_TYPE_YOUTUBE_CHANNEL, @@ -245,7 +247,7 @@ def test_source(self): 'prefer_60fps': False, 'prefer_hdr': False, 'fallback': Source.FALLBACK_FAIL, - 'sponsorblock_categories': ('preview', 'sponsor',), + 'sponsorblock_categories': data_categories, 'sub_langs': 'en', } response = c.post(f'/source-update/{source_uuid}', data) @@ -260,8 +262,9 @@ def test_source(self): source = Source.objects.get(pk=source_uuid) self.assertEqual(source.key, 'updatedkey') # Check that the SponsorBlock categories remain 
saved + source.refresh_from_db() self.assertEqual(source.sponsorblock_categories.selected_choices, - ['sponsor', 'preview']) + exected_categories) # Update the source index schedule which should recreate the scheduled task data = { 'source_type': Source.SOURCE_TYPE_YOUTUBE_CHANNEL, @@ -281,6 +284,7 @@ def test_source(self): 'prefer_60fps': False, 'prefer_hdr': False, 'fallback': Source.FALLBACK_FAIL, + 'sponsorblock_categories': data_categories, 'sub_langs': 'en', } response = c.post(f'/source-update/{source_uuid}', data) @@ -293,6 +297,9 @@ def test_source(self): self.assertEqual(path_parts[0], 'source') source_uuid = path_parts[1] source = Source.objects.get(pk=source_uuid) + # Check that the SponsorBlock categories remain saved + self.assertEqual(source.sponsorblock_categories.selected_choices, + exected_categories) # Check a new task has been created by seeing if the pk has changed new_task = Task.objects.get_task('sync.tasks.index_source_task', args=(source_uuid,))[0] From 8c33ce719d660f94a521273abe202669f2ba79d5 Mon Sep 17 00:00:00 2001 From: tcely Date: Sun, 2 Feb 2025 00:06:39 -0500 Subject: [PATCH 190/220] Pre-resolve conflict --- tubesync/sync/templates/widgets/checkbox_option.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tubesync/sync/templates/widgets/checkbox_option.html b/tubesync/sync/templates/widgets/checkbox_option.html index db32a457..739eb782 100644 --- a/tubesync/sync/templates/widgets/checkbox_option.html +++ b/tubesync/sync/templates/widgets/checkbox_option.html @@ -1,7 +1,7 @@ -