Async Worker 8
lee-cq committed Mar 23, 2024
1 parent 3c1ae77 commit f498916
Showing 6 changed files with 78 additions and 14 deletions.
7 changes: 7 additions & 0 deletions .github/workflows/alist-sync.yaml
@@ -119,3 +119,10 @@ jobs:
- name: Debugger
if: ${{ github.event.inputs.debug == 'true' && failure() }}
uses: csexton/debugger-action@master

- name: Upload Logs
if: always()
uses: actions/upload-artifact@v4
with:
name: logs
path: logs/*
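Note: the if: always() condition means the log upload runs whether the preceding steps succeeded or failed, so the logs/* artifact is still captured after a failed sync run.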
29 changes: 28 additions & 1 deletion alist_sync/common.py
@@ -3,6 +3,7 @@
import datetime
import hashlib
import logging
import re
import selectors
import sys
import threading
@@ -105,7 +106,7 @@ def timeout_input(msg, default, timeout=3):
return default


def beautify_size(byte_size: float):
def beautify_size(byte_size: float) -> str:
if byte_size < 1024:
return f"{byte_size:.2f}B"
byte_size /= 1024
@@ -118,6 +119,32 @@ def beautify_size(byte_size: float):
return f"{byte_size:.2f}GB"


def data_size_to_bytes(data_size: str) -> int:
"""数据大小转换为字节"""
data_size = data_size.strip()
if data_size == "-1":
return -1
units = {
"B": 1,
"KB": 1024,
"MB": 1024**2,
"GB": 1024**3,
}
    match = re.match(r"^(\d+(\.\d+)?)\s*([a-zA-Z]*)$", data_size)
if not match:
raise ValueError("Invalid data size format")
    # Extract the numeric value and the unit
size_number = float(match.group(1))
    unit = match.group(3).upper()
    unit = unit if unit else "B"
unit = unit if unit.endswith("B") else unit + "B"
    # Check that the unit is valid
if unit not in units:
raise ValueError(f"Invalid unit: {unit}")
    # Compute and return the byte count
return int(size_number * units[unit])


def transfer_speed(size, start: datetime.datetime, end: datetime.datetime) -> str:
"""转换速度"""
speed = (size * 2) / (end - start).seconds
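A minimal usage sketch of the new data_size_to_bytes helper (illustrative calls, not taken from the repository):

from alist_sync.common import data_size_to_bytes

data_size_to_bytes("-1")     # -1, the "no limit" sentinel
data_size_to_bytes("1.5KB")  # 1536
data_size_to_bytes("2 gb")   # 2147483648 (lowercase "gb" is normalized to "GB")
data_size_to_bytes("10TB")   # raises ValueError: Invalid unit: TB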
24 changes: 21 additions & 3 deletions alist_sync/config.py
@@ -13,6 +13,8 @@
from pydantic import Field, BaseModel, BeforeValidator
from pymongo.database import Database

from alist_sync.common import data_size_to_bytes


if TYPE_CHECKING:
from alist_sync.data_handle import ShelveHandle, MongoHandle
@@ -136,13 +138,17 @@ def __hash__(self):

_id: str = getenv("ALIST_SYNC_NAME", "alist-sync")

cache__dir: Path = Field(
_cache_dir: Path = Field(
default=getenv(
"ALIST_SYNC_CACHE_DIR",
Path(__file__).parent / ".alist-sync-cache",
),
alias="cache_dir",
)
_cache_max_size: Annotated[int, BeforeValidator(data_size_to_bytes)] = Field(
getenv("ALIST_SYNC_CACHE_MAX_SIZE", "0"),
alias="cache_max_size",
)

timeout: int = Field(10)
ua: str = None
@@ -178,8 +184,20 @@ def start_time(self) -> int:

@cached_property
def cache_dir(self) -> Path:
self.cache__dir.mkdir(exist_ok=True, parents=True)
return self.cache__dir
self._cache_dir.mkdir(exist_ok=True, parents=True)
return self._cache_dir

@cached_property
def cache_max_size(self) -> int:
if self._cache_max_size > 0:
return int(self._cache_max_size)

import psutil

if self._cache_max_size == 0:
return psutil.disk_usage(self.cache_dir.__str__()).free // 2
else:
return psutil.disk_usage(self.cache_dir.__str__()).free

@lru_cache(10)
def get_server(self, base_url) -> AlistServer:
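The cache_max_size fallback above can be summarized by this sketch (a hypothetical, simplified restatement, assuming psutil is installed):

import psutil

def resolve_cache_max_size(configured: int, cache_dir: str) -> int:
    if configured > 0:
        return configured                          # explicit limit in bytes
    free = psutil.disk_usage(cache_dir).free       # free bytes on the cache disk
    return free // 2 if configured == 0 else free  # 0 -> half the free space, -1 -> all of it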
26 changes: 17 additions & 9 deletions alist_sync/temp_files.py
@@ -6,13 +6,17 @@
@Date-Time : 2024/3/23 14:02
"""
import time
from pathlib import Path
from typing import Literal


from alist_sdk import AlistPathType, AlistPath
from pydantic import BaseModel

from alist_sync.common import GB
from alist_sync.config import create_config

SYNC_CONFIG = create_config()


class TempFile(BaseModel):
@@ -25,6 +29,7 @@ class TempFiles(BaseModel):
class TempFiles(BaseModel):
pre_size: int = 0
tmp_files: dict[Path, TempFile] = {}
max_size: int = SYNC_CONFIG.cache_max_size

def __del__(self):
for fp in self.tmp_files.keys():
@@ -40,19 +45,23 @@ def add_tmp(self, path: Path, remote_file: str | AlistPath):
)

def done_tmp(self, path: Path):
"""使用完成"""
assert path in self.tmp_files, f"没有找到path: {path}"
self.tmp_files[path].refer_times -= 1
# if self.tmp_files[path] <= 0:
# self.clear_file(path)

def clear_file(self, path: Path):
"""清理文件"""
path.unlink()
del self.tmp_files[path]

def pre_total_size(self):
"""预计总大小"""
return sum(_.remote_path.stat().size for _ in self.tmp_files.values())

def total_size(self):
"""实际总大小"""
return sum(fp.stat().st_size for fp in self.tmp_files.keys() if fp.exists())

def status(self) -> tuple[int, int, int, str]:
@@ -70,11 +79,10 @@ def status(self) -> tuple[int, int, int, str]:
)

def auto_clear(self):
if self.total_size() < 10 * GB:
return
[
self.clear_file(path)
for path, t in self.tmp_files.items()
if t.refer_times <= 0
]
return self.auto_clear()
        while self.total_size() > self.max_size:
            # Snapshot the items so clear_file() can delete from the dict safely.
            for path, t in list(self.tmp_files.items()):
                if t.refer_times <= 0:
                    self.clear_file(path)
            time.sleep(1)
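Note that auto_clear only deletes entries whose refer_times has dropped to zero (i.e. files released via done_tmp); if the cache exceeds max_size while every file is still referenced, the loop simply sleeps one second per pass until a worker releases something.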
3 changes: 3 additions & 0 deletions config-template.yaml
@@ -3,6 +3,9 @@ mongodb_uri: "mongodb+srv://${username}:${password}@${host}/alist_sync?retryWrit

# Cache directory
cache_dir: ./.alist-sync-cache
# cache_max_size: maximum cache size to use, in bytes.
# Default: 1/2 of the free space on the disk holding cache_dir; -1 means the whole disk (no limit), 0 means use 1/2 of the free disk space.
cache_max_size: 0

# Whether to run in daemon mode
daemon: false
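For example, with the default cache_max_size of 0 and roughly 100 GiB free on the disk holding cache_dir, the effective cache limit resolves to about 50 GiB (53687091200 bytes).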
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -15,7 +15,8 @@ dependencies = [
"async-lru",
"pydantic",
"pyyaml",
"pymongo"
"pymongo",
"psutil",
]
dynamic = ["version"]

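psutil is the new runtime dependency behind the cache_max_size fallback in alist_sync/config.py, which queries the free space of the cache disk.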
