Skip to content

Commit

Permalink
implemented a simple cache run
Browse files Browse the repository at this point in the history
  • Loading branch information
Barakudum committed Feb 3, 2024
1 parent a86aab3 commit 79bf9dd
Show file tree
Hide file tree
Showing 4 changed files with 148 additions and 71 deletions.
1 change: 1 addition & 0 deletions Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ python-magic = "*"
config-library = {extras = ["yaml"], version = "*"}
flask-compress = "*"
waitress = "*"
schedule = "*"

[dev-packages]
better-exceptions = "*"
Expand Down
132 changes: 70 additions & 62 deletions Pipfile.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

38 changes: 29 additions & 9 deletions src/jarklin/cache/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from functools import cached_property
from configlib import ConfigInterface
from ..common.types import InfoEntry
from ..common.dot_ignore import DotIgnore
from ..common import dot_ignore, scheduling
from ._cache_generator import CacheGenerator
from .video import VideoCacheGenerator
from .gallery import GalleryCacheGenerator
Expand All @@ -23,12 +23,12 @@

class Cache:
def __init__(self, config: ConfigInterface) -> None:
self._shutdown: bool = False
self._shutdown_event = None
self._config = config

@cached_property
def ignorer(self) -> 'DotIgnore':
return DotIgnore(
def ignorer(self) -> 'dot_ignore.DotIgnore':
return dot_ignore.DotIgnore(
*self._config.getsplit('cache', 'ignore', fallback=[]),
".*", # .jarklin/ | .jarklin.{ext}
root=self.root,
Expand All @@ -50,11 +50,31 @@ def jarklin_cache(self) -> Path:
directory.mkdir(parents=True, exist_ok=True)
return directory

# todo: replace with file-system-monitoring
def run(self) -> None:
self.iteration()
import time
import schedule

scheduler = schedule.Scheduler()
scheduler.every(1).hour.at(":00").do(self.iteration)
shutdown_event, thread = scheduling.run_continuously(scheduler, interval=5)
self._shutdown_event = shutdown_event
try:
while thread.is_alive():
time.sleep(5) # tiny bit larger for less resources
except KeyboardInterrupt:
logging.info("shutdown signal received. graceful shutdown")
# attempt a graceful shutdown
shutdown_event.set()
while thread.is_alive():
time.sleep(1)
finally:
self._shutdown_event = None

def shutdown(self) -> None:
self._shutdown = True
if self._shutdown_event is None:
raise RuntimeError("cache is not running")
self._shutdown_event.set()

def remove(self, ignore_errors: bool = False) -> None:
shutil.rmtree(self.jarklin_path, ignore_errors=ignore_errors)
Expand All @@ -79,7 +99,7 @@ def generate(self) -> None:
info: t.List[InfoEntry] = []
generators: t.List[CacheGenerator] = self.find_generators()

def generate_info():
def generate_info_file():
logging.info("generating info.json")
with open(self.root.joinpath('.jarklin/info.json'), 'w') as fp:
fp.write(json.dumps(info))
Expand All @@ -95,7 +115,7 @@ def generate_info():
except Exception as error:
logging.error(f"Cache: generation failed ({generator})", exc_info=error)
continue
generate_info()
generate_info_file()
info.append(InfoEntry(
path=str(source.relative_to(self.root)),
name=source.stem,
Expand All @@ -105,7 +125,7 @@ def generate_info():
meta=json.loads(dest.joinpath("meta.json").read_bytes()),
))

generate_info()
generate_info_file()

def find_generators(self) -> t.List[CacheGenerator]:
generators: t.List[CacheGenerator] = []
Expand Down
48 changes: 48 additions & 0 deletions src/jarklin/common/scheduling.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# -*- coding=utf-8 -*-
r"""
"""
import time
import logging
import functools
import threading
import schedule


def catch_exceptions(cancel_on_failure=False):
    """
    Decorator factory that logs any exception raised by a scheduled job
    instead of letting it propagate (which would kill the scheduler thread).

    :param cancel_on_failure: when True, a failing job is removed from the
        scheduler by returning ``schedule.CancelJob``.
    """
    def decorator(job_func):
        @functools.wraps(job_func)
        def wrapper(*args, **kwargs):
            try:
                return job_func(*args, **kwargs)
            except Exception as error:
                # fix: message previously lacked the closing parenthesis
                logging.error(f"task {job_func.__name__} failed with ({type(error).__name__})", exc_info=error)
                if cancel_on_failure:
                    return schedule.CancelJob
        return wrapper
    return decorator


def run_continuously(scheduler: "schedule.Scheduler", interval: int = 1):
    """Continuously run, while executing pending jobs at each
    elapsed time interval.
    @return cease_continuous_run: threading.Event which can
    be set to cease continuous run. Please note that it is
    *intended behavior that run_continuously() does not run
    missed jobs*. For example, if you've registered a job that
    should run every minute, and you set a continuous run
    interval of one hour then your job won't be run 60 times
    at each interval but only once.
    """
    cease_continuous_run = threading.Event()

    def runner():
        while not cease_continuous_run.is_set():
            logging.debug("running pending jobs")
            scheduler.run_pending()
            logging.debug("waiting till next job run")
            # Event.wait() instead of time.sleep(): returns immediately once
            # the stop event is set, so shutdown is not delayed by up to
            # `interval` seconds.
            cease_continuous_run.wait(interval)

    continuous_thread = threading.Thread(target=runner, name="scheduler")
    continuous_thread.start()
    return cease_continuous_run, continuous_thread

0 comments on commit 79bf9dd

Please sign in to comment.