diff --git a/github_app_geo_project/module/__init__.py b/github_app_geo_project/module/__init__.py
index 744c4fc80e..f9d91e2aed 100644
--- a/github_app_geo_project/module/__init__.py
+++ b/github_app_geo_project/module/__init__.py
@@ -363,7 +363,7 @@ def cleanup(self, context: CleanupContext[_EVENT_DATA]) -> None:
del context
@abstractmethod
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
super_ = [c for c in self.__class__.__orig_bases__ if c.__origin__ == Module][0] # type: ignore[attr-defined] # pylint: disable=no-member
generic_element = super_.__args__[0]
diff --git a/github_app_geo_project/module/audit/__init__.py b/github_app_geo_project/module/audit/__init__.py
index 9e93103dee..a594aaf6c1 100644
--- a/github_app_geo_project/module/audit/__init__.py
+++ b/github_app_geo_project/module/audit/__init__.py
@@ -2,7 +2,6 @@
import asyncio
import datetime
-import glob
import json
import logging
import os
@@ -11,6 +10,7 @@
import subprocess # nosec
import tempfile
import urllib.parse
+from pathlib import Path
from typing import Any, cast
import github
@@ -157,8 +157,9 @@ async def _process_snyk_dpkg(
logs_url = urllib.parse.urljoin(context.service_url, f"logs/{context.job_id}")
if context.module_event_data.type == "snyk":
python_version = ""
- if os.path.exists(".tool-versions"):
- with open(".tool-versions", encoding="utf-8") as file:
+ tool_versions = Path(".tool-versions")
+ if tool_versions.exists():
+ with tool_versions.open(encoding="utf-8") as file:
for line in file:
if line.startswith("python "):
python_version = ".".join(line.split(" ")[1].split(".")[0:2]).strip()
@@ -197,8 +198,11 @@ async def _process_snyk_dpkg(
if context.module_event_data.type == "dpkg":
body_md = "Update dpkg packages"
- if os.path.exists("ci/dpkg-versions.yaml") or os.path.exists(
- ".github/dpkg-versions.yaml",
+ if (
+ Path("ci/dpkg-versions.yaml").exists()
+ or Path(
+ ".github/dpkg-versions.yaml",
+ ).exists()
):
await audit_utils.dpkg(
context.module_config.get("dpkg", {}),
@@ -339,7 +343,7 @@ async def _use_python_version(python_version: str) -> dict[str, str]:
# Get path from /pyenv/versions/{python_version}.*/bin/
env = os.environ.copy()
- bin_paths = glob.glob(f"/pyenv/versions/{python_version}.*/bin")
+ bin_paths = list(Path("/pyenv/versions/").glob(f"{python_version}.*/bin"))
if bin_paths:
env["PATH"] = f'{bin_paths[0]}:{env["PATH"]}'
@@ -464,12 +468,12 @@ async def process(
raise
if security_file is not None:
key_starts.append(_OUTDATED)
- issue_check.add_check("outdated", "Check outdated version", False)
+ issue_check.add_check("outdated", "Check outdated version", checked=False)
else:
issue_check.remove_check("outdated")
if context.module_config.get("snyk", {}).get("enabled", configuration.ENABLE_SNYK_DEFAULT):
- issue_check.add_check("snyk", "Check security vulnerabilities with Snyk", False)
+ issue_check.add_check("snyk", "Check security vulnerabilities with Snyk", checked=False)
key_starts.append("Snyk check/fix ")
else:
issue_check.remove_check("snyk")
@@ -492,7 +496,7 @@ async def process(
context.module_config.get("dpkg", {}).get("enabled", configuration.ENABLE_DPKG_DEFAULT)
and dpkg_version is not None
):
- issue_check.add_check("dpkg", "Update dpkg packages", False)
+ issue_check.add_check("dpkg", "Update dpkg packages", checked=False)
key_starts.append("Dpkg ")
else:
issue_check.remove_check("dpkg")
@@ -563,9 +567,9 @@ async def process(
return _get_process_output(context, issue_check, short_message, success)
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
- with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
+ with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("audit") # type: ignore[no-any-return]
def get_github_application_permissions(self) -> module.GitHubApplicationPermissions:
@@ -599,7 +603,6 @@ def get_transversal_dashboard(
},
)
return module.TransversalDashboardOutput(
- # template="dashboard.html",
renderer="github_app_geo_project:module/audit/dashboard.html",
data={"repositories": repositories},
)
diff --git a/github_app_geo_project/module/audit/utils.py b/github_app_geo_project/module/audit/utils.py
index 65f56b2ee4..f08e854188 100644
--- a/github_app_geo_project/module/audit/utils.py
+++ b/github_app_geo_project/module/audit/utils.py
@@ -6,6 +6,7 @@
import logging
import os.path
import subprocess
+from pathlib import Path
from typing import NamedTuple
import apt_repo
@@ -297,7 +298,7 @@ async def _install_poetry_dependencies(
f"Dependencies installed from {file}",
f"Error while installing the dependencies from {file}",
f"Timeout while installing the dependencies from {file}",
- os.path.dirname(os.path.abspath(file)),
+ Path(file).resolve().parent,
)
if proc_message is not None:
result.append(proc_message)
@@ -578,7 +579,7 @@ async def _snyk_fix(
)
cwd = module_utils.get_cwd()
- project = "-" if cwd is None else os.path.basename(cwd)
+ project = "-" if cwd is None else Path(cwd).name
message = module_utils.HtmlMessage(
"
\n".join(
[
@@ -601,7 +602,7 @@ async def _npm_audit_fix(
messages: set[str] = set()
fix_success = True
for package_lock_file_name, file_messages in fixable_files_npm.items():
- directory = os.path.dirname(os.path.abspath(package_lock_file_name))
+ directory = Path(package_lock_file_name).absolute().parent
messages.update(file_messages)
_LOGGER.debug("Fixing vulnerabilities in %s with npm audit fix", package_lock_file_name)
command = ["npm", "audit", "fix"]
@@ -612,19 +613,19 @@ async def _npm_audit_fix(
"Npm audit fix",
"Error while fixing the project",
"Timeout while fixing the project",
- directory,
+ str(directory),
)
if message is not None:
result.append(message)
_LOGGER.debug("Fixing version in %s", package_lock_file_name)
# Remove the add '~' in the version in the package.json
- with open(os.path.join(directory, "package.json"), encoding="utf-8") as package_file:
+ with (directory / "package.json").open(encoding="utf-8") as package_file:
package_json = json.load(package_file)
for dependencies_type in ("dependencies", "devDependencies"):
for package, version in package_json.get(dependencies_type, {}).items():
if version.startswith("^"):
package_json[dependencies_type][package] = version[1:]
- with open(os.path.join(directory, "package.json"), "w", encoding="utf-8") as package_file:
+ with (directory / "package.json").open("w", encoding="utf-8") as package_file:
json.dump(package_json, package_file, indent=2)
_LOGGER.debug("Succeeded fix %s", package_lock_file_name)
@@ -644,8 +645,8 @@ def outdated_versions(
for row in security.data:
str_date = row[date_index]
if str_date not in ("Unsupported", "Best effort", "To be defined"):
- date = datetime.datetime.strptime(row[date_index], "%d/%m/%Y")
- if date < datetime.datetime.now():
+ date = datetime.datetime.strptime(row[date_index], "%d/%m/%Y").replace(tzinfo=datetime.UTC)
+ if date < datetime.datetime.now(datetime.UTC):
errors.append(
f"The version '{row[version_index]}' is outdated, it can be set to "
"'Unsupported', 'Best effort' or 'To be defined'",
@@ -667,7 +668,8 @@ def _get_sources(
if dist not in _SOURCES:
conf = local_config.get("sources", config.get("sources", configuration.DPKG_SOURCES_DEFAULT))
if dist not in conf:
- raise ValueError(f"The distribution {dist} is not in the configuration")
+ message = f"The distribution {dist} is not in the configuration"
+ raise ValueError(message)
_SOURCES[dist] = apt_repo.APTSources(
[
apt_repo.APTRepository(
@@ -694,7 +696,7 @@ def _get_sources(
exception,
)
except AttributeError as exception:
- _LOGGER.error("Error while loading the distribution %s: %s", dist, exception)
+ _LOGGER.error("Error while loading the distribution %s: %s", dist, exception) # noqa: TRY400
return _SOURCES[dist]
@@ -708,12 +710,13 @@ async def _get_packages_version(
global _GENERATION_TIME # pylint: disable=global-statement
if (
_GENERATION_TIME is None
- or datetime.datetime.now() - utils.parse_duration(os.environ.get("GHCI_DPKG_CACHE_DURATION", "3h"))
+ or datetime.datetime.now(datetime.UTC)
+ - utils.parse_duration(os.environ.get("GHCI_DPKG_CACHE_DURATION", "3h"))
> _GENERATION_TIME
):
_PACKAGE_VERSION.clear()
_SOURCES.clear()
- _GENERATION_TIME = datetime.datetime.now()
+ _GENERATION_TIME = datetime.datetime.now(datetime.UTC)
if package not in _PACKAGE_VERSION:
dist = package.split("/")[0]
await asyncio.to_thread(_get_sources, dist, config, local_config)
@@ -727,15 +730,17 @@ async def dpkg(
local_config: configuration.DpkgConfiguration,
) -> None:
"""Update the version of packages in the file .github/dpkg-versions.yaml or ci/dpkg-versions.yaml."""
- if not os.path.exists("ci/dpkg-versions.yaml") and not os.path.exists(".github/dpkg-versions.yaml"):
+ ci_dpkg_versions_filename = Path("ci/dpkg-versions.yaml")
+ github_dpkg_versions_filename = Path(".github/dpkg-versions.yaml")
+
+ if not ci_dpkg_versions_filename.exists() and not github_dpkg_versions_filename.exists():
_LOGGER.warning("The file .github/dpkg-versions.yaml or ci/dpkg-versions.yaml does not exist")
dpkg_versions_filename = (
- ".github/dpkg-versions.yaml"
- if os.path.exists(".github/dpkg-versions.yaml")
- else "ci/dpkg-versions.yaml"
+ github_dpkg_versions_filename if github_dpkg_versions_filename.exists() else ci_dpkg_versions_filename
)
- with open(dpkg_versions_filename, encoding="utf-8") as versions_file:
+
+ with dpkg_versions_filename.open(encoding="utf-8") as versions_file:
versions_config = yaml.load(versions_file, Loader=yaml.SafeLoader)
for versions in versions_config.values():
for package_full in versions:
@@ -768,5 +773,5 @@ async def dpkg(
exception,
)
- with open(dpkg_versions_filename, "w", encoding="utf-8") as versions_file:
+ with dpkg_versions_filename.open("w", encoding="utf-8") as versions_file:
yaml.dump(versions_config, versions_file, Dumper=yaml.SafeDumper)
diff --git a/github_app_geo_project/module/backport/__init__.py b/github_app_geo_project/module/backport/__init__.py
index 4824bea6ef..1115b363af 100644
--- a/github_app_geo_project/module/backport/__init__.py
+++ b/github_app_geo_project/module/backport/__init__.py
@@ -6,6 +6,7 @@
import os.path
import subprocess # nosec
import tempfile
+from pathlib import Path
from typing import Any
import github
@@ -56,9 +57,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request", "push"},
)
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module."""
- with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
+ with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("backport") # type: ignore[no-any-return]
def get_actions(self, context: module.GetActionContext) -> list[module.Action[_ActionData]]:
@@ -285,7 +286,7 @@ async def _backport(
f"git push origin {backport_branch} --force",
],
)
- with open("BACKPORT_TODO", "w", encoding="utf-8") as f:
+ with Path("BACKPORT_TODO").open("w", encoding="utf-8") as f:
f.write("\n".join(message))
command = ["git", "add", "BACKPORT_TODO"]
proc = await asyncio.create_subprocess_exec(*command)
diff --git a/github_app_geo_project/module/clean/__init__.py b/github_app_geo_project/module/clean/__init__.py
index ec298fdcbb..ed82b7b511 100644
--- a/github_app_geo_project/module/clean/__init__.py
+++ b/github_app_geo_project/module/clean/__init__.py
@@ -6,6 +6,7 @@
import os.path
import subprocess # nosec
import tempfile
+from pathlib import Path
from typing import Any, cast
import aiohttp
@@ -52,9 +53,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request", "delete"},
)
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module."""
- with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
+ with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("clean") # type: ignore[no-any-return]
def get_actions(self, context: module.GetActionContext) -> list[module.Action[_ActionData]]:
@@ -130,7 +131,7 @@ async def _clean_docker(
),
),
)
- name = tag_publish.get_value(*pull_match)
+ name = tag_publish.get_value(*pull_match) # noqa: PLW2901
for repo in (
publish_config.get("docker", {})
@@ -154,7 +155,7 @@ async def _clean_docker(
continue
for image in publish_config.get("docker", {}).get("images", []):
for tag in image.get("tags", []):
- tag = tag.format(version=name)
+ tag = tag.format(version=name) # noqa: PLW2901
_LOGGER.info("Cleaning %s/%s:%s", host, image["name"], tag)
if host == "docker.io":
diff --git a/github_app_geo_project/module/delete_old_workflow_runs/__init__.py b/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
index 68830a1f48..ad233f9c69 100644
--- a/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
+++ b/github_app_geo_project/module/delete_old_workflow_runs/__init__.py
@@ -4,7 +4,7 @@
import datetime
import json
import logging
-import os.path
+from pathlib import Path
from typing import Any
from github_app_geo_project import module
@@ -42,9 +42,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di
return [module.Action(data={})]
return []
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module configuration."""
- with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
+ with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
@@ -82,7 +82,7 @@ async def process(
status = rule.get("status")
arguments = {
- "created": f"<{datetime.datetime.now() - datetime.timedelta(days=older_than_days):%Y-%m-%d}",
+ "created": f"<{datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=older_than_days):%Y-%m-%d}",
}
if actor:
arguments["actor"] = actor
diff --git a/github_app_geo_project/module/pull_request/checks.py b/github_app_geo_project/module/pull_request/checks.py
index 85dd9fbc57..a74ba0ca12 100644
--- a/github_app_geo_project/module/pull_request/checks.py
+++ b/github_app_geo_project/module/pull_request/checks.py
@@ -3,10 +3,10 @@
import asyncio
import json
import logging
-import os
import re
import tempfile
import typing
+from pathlib import Path
from typing import Any
import github
@@ -279,12 +279,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request"},
)
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the configuration."""
- with open(
- os.path.join(os.path.dirname(__file__), "checks-schema.json"),
- encoding="utf-8",
- ) as schema_file:
+ with (Path(__file__).parent / "checks-schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
diff --git a/github_app_geo_project/module/pull_request/links.py b/github_app_geo_project/module/pull_request/links.py
index ab683760e3..c629b14e39 100644
--- a/github_app_geo_project/module/pull_request/links.py
+++ b/github_app_geo_project/module/pull_request/links.py
@@ -1,8 +1,8 @@
"""Module that adds some links to the pull request message."""
import json
-import os
import re
+from pathlib import Path
from typing import Any
import github
@@ -100,12 +100,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request"},
)
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the configuration."""
- with open(
- os.path.join(os.path.dirname(__file__), "links-schema.json"),
- encoding="utf-8",
- ) as schema_file:
+ with (Path(__file__).parent / "links-schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
diff --git a/github_app_geo_project/module/standard/auto.py b/github_app_geo_project/module/standard/auto.py
index cbbd14ec3c..43dd2854bb 100644
--- a/github_app_geo_project/module/standard/auto.py
+++ b/github_app_geo_project/module/standard/auto.py
@@ -2,9 +2,9 @@
import json
import logging
-import os
import re
from abc import abstractmethod
+from pathlib import Path
from typing import Any, cast
import github
@@ -92,12 +92,9 @@ async def process(
return module.ProcessOutput()
return module.ProcessOutput()
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
- with open(
- os.path.join(os.path.dirname(__file__), "auto-schema.json"),
- encoding="utf-8",
- ) as schema_file:
+ with (Path(__file__).parent / "auto-schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("definitions", {}).get("auto") # type: ignore[no-any-return]
def get_github_application_permissions(self) -> module.GitHubApplicationPermissions:
diff --git a/github_app_geo_project/module/standard/auto_close.py b/github_app_geo_project/module/standard/auto_close.py
index 627b19ffff..d442b0ef4c 100644
--- a/github_app_geo_project/module/standard/auto_close.py
+++ b/github_app_geo_project/module/standard/auto_close.py
@@ -32,4 +32,5 @@ def do_action(
Note that this method is called in the queue consuming Pod
"""
+ del context # Unused
pull_request.edit(state="closed")
diff --git a/github_app_geo_project/module/standard/auto_merge.py b/github_app_geo_project/module/standard/auto_merge.py
index 113897c65b..fe124148e6 100644
--- a/github_app_geo_project/module/standard/auto_merge.py
+++ b/github_app_geo_project/module/standard/auto_merge.py
@@ -44,4 +44,5 @@ def do_action(
Note that this method is called in the queue consuming Pod
"""
+ del context # Unused
pull_request.enable_automerge(merge_method="SQUASH")
diff --git a/github_app_geo_project/module/standard/auto_review.py b/github_app_geo_project/module/standard/auto_review.py
index 4ab6520063..8870eb32bd 100644
--- a/github_app_geo_project/module/standard/auto_review.py
+++ b/github_app_geo_project/module/standard/auto_review.py
@@ -36,4 +36,5 @@ def do_action(
Note that this method is called in the queue consuming Pod
"""
+ del context # Unused
pull_request.create_review(event="APPROVE")
diff --git a/github_app_geo_project/module/standard/changelog.py b/github_app_geo_project/module/standard/changelog.py
index 68de69ead2..941aa361b8 100644
--- a/github_app_geo_project/module/standard/changelog.py
+++ b/github_app_geo_project/module/standard/changelog.py
@@ -2,9 +2,9 @@
import json
import logging
-import os
import re
from collections.abc import Callable
+from pathlib import Path
from typing import Any, NamedTuple, cast
import github
@@ -652,13 +652,10 @@ async def process(
)
return module.ProcessOutput()
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
# Get changelog-schema.json related to this file
- with open(
- os.path.join(os.path.dirname(__file__), "changelog-schema.json"),
- encoding="utf-8",
- ) as schema_file:
+ with (Path(__file__).parent / "changelog-schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("changelog") # type: ignore[no-any-return]
def get_github_application_permissions(self) -> module.GitHubApplicationPermissions:
diff --git a/github_app_geo_project/module/standard/patch.py b/github_app_geo_project/module/standard/patch.py
index 97e3d2ea28..8801c9702f 100644
--- a/github_app_geo_project/module/standard/patch.py
+++ b/github_app_geo_project/module/standard/patch.py
@@ -17,7 +17,7 @@
_LOGGER = logging.getLogger(__name__)
-class PatchException(Exception):
+class PatchError(Exception):
"""Error while applying the patch."""
@@ -195,9 +195,10 @@ async def process(
f"{artifact.name[:-6]}\n\nFrom the artifact of the previous workflow run",
)
if not success:
- raise PatchException(
- "Failed to commit the changes, see logs for details",
+ exception_message = (
+ "Failed to commit the changes, see logs for details"
)
+ raise PatchError(exception_message)
should_push = True
if should_push:
command = ["git", "push", "origin", f"HEAD:{workflow_run.head_branch}"]
diff --git a/github_app_geo_project/module/utils.py b/github_app_geo_project/module/utils.py
index 354d3e69d0..6fc9f68f83 100644
--- a/github_app_geo_project/module/utils.py
+++ b/github_app_geo_project/module/utils.py
@@ -7,8 +7,10 @@
import os
import re
import shlex
+from pathlib import Path
from typing import Any, cast
+import anyio
import github
import html_sanitizer
import markdownify
@@ -68,7 +70,7 @@ def parse_dashboard_issue(issue_data: str) -> DashboardIssueRaw:
lines = issue_data.split("\n")
last_is_check = False
for line in lines:
- line = line.strip()
+ line = line.strip() # noqa: PLW2901
check_match = _CHECK_RE.match(line)
if check_match is not None:
checked = check_match.group(1) == "x"
@@ -457,14 +459,14 @@ def from_async_artifacts(
)
-def get_cwd() -> str | None:
+def get_cwd() -> Path | None:
"""
Get the current working directory.
Did not raise an exception if it does not exist, return None instead.
"""
try:
- return os.getcwd()
+ return Path.cwd()
except FileNotFoundError:
return None
@@ -472,11 +474,11 @@ def get_cwd() -> str | None:
async def run_timeout(
command: list[str],
env: dict[str, str] | None,
- timeout: int,
+ timeout: int, # noqa: ASYNC109
success_message: str,
error_message: str,
timeout_message: str,
- cwd: str | None = None,
+ cwd: Path | None = None,
error: bool = True,
) -> tuple[str | None, bool, Message | None]:
"""
@@ -507,7 +509,7 @@ async def run_timeout(
args.append(timeout)
_LOGGER.debug(log_message, *args)
async_proc = None
- start = datetime.datetime.now()
+ start = datetime.datetime.now(datetime.UTC)
try:
async with asyncio.timeout(timeout):
try:
@@ -525,15 +527,15 @@ async def run_timeout(
message: Message = AnsiProcessMessage.from_async_artifacts(command, async_proc, stdout, stderr)
success = async_proc.returncode == 0
if success:
- message.title = f"{success_message}, in {datetime.datetime.now() - start}s."
+ message.title = f"{success_message}, in {datetime.datetime.now(datetime.UTC) - start}s."
_LOGGER.debug(message)
else:
- message.title = f"{error_message}, in {datetime.datetime.now() - start}s."
+ message.title = f"{error_message}, in {datetime.datetime.now(datetime.UTC) - start}s."
_LOGGER.warning(message)
return stdout.decode(), success, message
except FileNotFoundError as exception:
if error:
- _LOGGER.exception("%s not found: %s", command[0], exception)
+ _LOGGER.exception("%s not found: %s", command[0], exception) # noqa: TRY401
else:
_LOGGER.warning("%s not found", command[0])
cmd = ["find", "/", "-name", command[0]]
@@ -564,7 +566,7 @@ async def run_timeout(
_LOGGER.warning(message)
return None, False, message
if error:
- _LOGGER.exception("TimeoutError for %s: %s", command[0], exception)
+ _LOGGER.exception("TimeoutError for %s: %s", command[0], exception) # noqa: TRY401
else:
_LOGGER.warning("TimeoutError for %s", command[0])
return None, False, AnsiProcessMessage(command, None, "", "", str(exception))
@@ -617,7 +619,7 @@ async def create_commit(message: str, pre_commit_check: bool = True) -> bool:
)
if not success and pre_commit_check:
# On pre-commit issues, add them to the commit, and try again without the pre-commit
- success = await create_commit(message, False)
+ success = await create_commit(message, pre_commit_check=False)
return success
@@ -694,7 +696,6 @@ async def auto_merge_pull_request(pull_request: github.PullRequest.PullRequest)
if n != 0:
await asyncio.sleep(math.pow(n, 2))
pull_request.enable_automerge(merge_method="MERGE")
- return
except github.GithubException as exception:
errors = exception.data.get("errors", [])
if (
@@ -704,6 +705,8 @@ async def auto_merge_pull_request(pull_request: github.PullRequest.PullRequest)
):
continue
raise
+ else:
+ return
if exception is not None:
raise exception
@@ -716,7 +719,7 @@ async def create_commit_pull_request(
project: configuration.GithubProject,
) -> tuple[bool, github.PullRequest.PullRequest | None]:
"""Do a commit, then create a pull request."""
- if os.path.exists(".pre-commit-config.yaml"):
+ if Path(".pre-commit-config.yaml").exists():
try:
command = ["pre-commit", "install"]
proc = await asyncio.create_subprocess_exec( # pylint: disable=subprocess-run-check
@@ -761,11 +764,11 @@ def close_pull_request_issues(new_branch: str, message: str, project: configurat
async def git_clone(github_project: configuration.GithubProject, branch: str) -> bool:
"""Clone the Git repository."""
# Store the ssh key
- directory = os.path.expanduser("~/.ssh/")
- if not os.path.exists(directory):
- os.makedirs(directory)
- with open(os.path.join(directory, "id_rsa"), "w", encoding="utf-8") as file:
- file.write(github_project.application.auth.private_key)
+ directory = Path("~/.ssh/").expanduser()
+ if not directory.exists():
+ directory.mkdir(parents=True)
+ async with await anyio.open_file(directory / "id_rsa", "w", encoding="utf-8") as file:
+ await file.write(github_project.application.auth.private_key)
command = [
"git",
@@ -903,19 +906,19 @@ def manage_updated(status: dict[str, Any], key: str, days_old: int = 2) -> None:
Add an updated field to the status and remove the old status.
"""
- status.setdefault(key, {})["updated"] = datetime.datetime.now().isoformat()
+ status.setdefault(key, {})["updated"] = datetime.datetime.now(datetime.UTC).isoformat()
for other_key, other_object in list(status.items()):
if (
not isinstance(other_object, dict)
or "updated" not in other_object
or datetime.datetime.fromisoformat(other_object["updated"])
- < datetime.datetime.now() - datetime.timedelta(days=days_old)
+ < datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_old)
):
_LOGGER.debug(
"Remove old status %s (%s < %s)",
other_key,
other_object.get("updated", "-"),
- datetime.datetime.now() - datetime.timedelta(days=days_old),
+ datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_old),
)
del status[other_key]
@@ -931,16 +934,16 @@ def manage_updated_separated(
Add an updated field to the status and remove the old status.
"""
- updated[key] = datetime.datetime.now()
+ updated[key] = datetime.datetime.now(datetime.UTC)
_LOGGER.debug("Set updated %s to %s", key, updated[key])
- min_date = datetime.datetime.now() - datetime.timedelta(days=days_old)
+ min_date = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_old)
for other_key, date in list(updated.items()):
if date < min_date:
_LOGGER.debug(
"Remove old date %s (%s < %s)",
other_key,
date,
- datetime.datetime.now() - datetime.timedelta(days=days_old),
+ datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=days_old),
)
del updated[other_key]
diff --git a/github_app_geo_project/module/versions/__init__.py b/github_app_geo_project/module/versions/__init__.py
index d8d0869ec2..4f1e456fdf 100644
--- a/github_app_geo_project/module/versions/__init__.py
+++ b/github_app_geo_project/module/versions/__init__.py
@@ -10,6 +10,7 @@
import tempfile
import tomllib
from collections.abc import Iterable
+from pathlib import Path
from typing import Any
import aiohttp
@@ -97,7 +98,7 @@ class _EventData(BaseModel):
alternate_versions: list[str] | None = None
-class VersionException(Exception):
+class VersionError(Exception):
"""Error while updating the versions."""
@@ -131,9 +132,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[_E
return [module.Action(data=_EventData(step=1), priority=module.PRIORITY_CRON)]
return []
- def get_json_schema(self) -> dict[str, Any]:
+ async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module configuration."""
- with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
+ with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
@@ -236,7 +237,8 @@ async def process(
os.chdir(tmpdirname)
success = await module_utils.git_clone(context.github_project, branch)
if not success:
- raise VersionException("Failed to clone the repository")
+ exception_message = "Failed to clone the repository"
+ raise VersionError(exception_message)
version_status = status.versions[version]
version_status.names_by_datasource.clear()
@@ -295,7 +297,7 @@ async def process(
_LOGGER.debug(message)
return ProcessOutput(transversal_status=context.transversal_status)
- raise VersionException("Invalid step")
+ raise VersionError("Invalid step")
def has_transversal_dashboard(self) -> bool:
"""Return True if the module has a transversal dashboard."""
@@ -367,7 +369,6 @@ def get_transversal_dashboard(
)
return module.TransversalDashboardOutput(
- # template="dashboard.html",
renderer="github_app_geo_project:module/versions/dashboard.html",
data={"repositories": list(transversal_status.repositories.keys())},
)
@@ -454,7 +455,7 @@ async def _get_names(
_LOGGER.error(message)
else:
for filename in stdout.decode().splitlines():
- with open(filename, encoding="utf-8") as file:
+ with Path(filename).open(encoding="utf-8") as file:
data = tomllib.loads(file.read())
name = data.get("project", {}).get("name")
names = names_by_datasource.setdefault("pypi", _TransversalStatusNameByDatasource()).names
@@ -482,7 +483,7 @@ async def _get_names(
_LOGGER.error(message)
else:
for filename in stdout.decode().splitlines():
- with open(filename, encoding="utf-8") as file:
+ with Path(filename).open(encoding="utf-8") as file:
names = names_by_datasource.setdefault("pypi", _TransversalStatusNameByDatasource()).names
for line in file:
match = re.match(r'^ *name ?= ?[\'"](.*)[\'"],?$', line)
@@ -490,8 +491,8 @@ async def _get_names(
names.append(match.group(1))
os.environ["GITHUB_REPOSITORY"] = f"{context.github_project.owner}/{context.github_project.repository}"
docker_config = {}
- if os.path.exists(".github/publish.yaml"):
- with open(".github/publish.yaml", encoding="utf-8") as file:
+ if Path(".github/publish.yaml").exists():
+ with Path(".github/publish.yaml").open(encoding="utf-8") as file:
docker_config = yaml.load(file, Loader=yaml.SafeLoader).get("docker", {})
else:
data = c2cciutils.get_config()
@@ -542,7 +543,7 @@ async def _get_names(
_LOGGER.error(message)
else:
for filename in stdout.decode().splitlines():
- with open(filename, encoding="utf-8") as file:
+ with Path(filename).open(encoding="utf-8") as file:
data = json.load(file)
name = data.get("name")
names = names_by_datasource.setdefault("npm", _TransversalStatusNameByDatasource()).names
@@ -582,7 +583,7 @@ async def _get_dependencies(
if proc.returncode != 0:
message.title = "Failed to get the dependencies"
_LOGGER.error(message)
- raise VersionException(message.title)
+ raise VersionError(message.title)
message.title = "Got the dependencies"
_LOGGER.debug(message)
@@ -697,11 +698,11 @@ async def _update_upstream_versions(
if package_status.upstream_updated and (
package_status.upstream_updated
- > datetime.datetime.now()
+ > datetime.datetime.now(datetime.UTC)
- utils.parse_duration(os.environ.get("GHCI_EXTERNAL_PACKAGES_UPDATE_PERIOD", "30d"))
):
return
- package_status.upstream_updated = datetime.datetime.now()
+ package_status.upstream_updated = datetime.datetime.now(datetime.UTC)
async with (
aiohttp.ClientSession() as session,
@@ -723,7 +724,7 @@ async def _update_upstream_versions(
else:
if not isinstance(eol, str):
continue
- if datetime.datetime.fromisoformat(eol) < datetime.datetime.now():
+ if datetime.datetime.fromisoformat(eol) < datetime.datetime.now(datetime.UTC):
continue
package_status.versions[cycle["cycle"]] = _TransversalStatusVersion(
support=eol,
@@ -740,7 +741,9 @@ def _parse_support_date(text: str) -> datetime.datetime:
return datetime.datetime.fromisoformat(text)
except ValueError:
# Parse date like 01/01/2024
- return datetime.datetime.strptime(text, "%d/%m/%Y")
+ return datetime.datetime.strptime(text, "%d/%m/%Y").replace(
+ tzinfo=datetime.UTC,
+ )
def _is_supported(base: str, other: str) -> bool:
@@ -793,7 +796,7 @@ def _build_internal_dependencies(
dependency_minor = _canonical_minor_version(datasource_name, dependency_version)
if datasource_name == "docker":
assert len(dependency_package_data.status_by_version) == 1
- support = list(dependency_package_data.status_by_version.values())[0]
+ support = next(iter(dependency_package_data.status_by_version.values()))
else:
support = dependency_package_data.status_by_version.get(
dependency_minor,
diff --git a/github_app_geo_project/scripts/health_check.py b/github_app_geo_project/scripts/health_check.py
index 6ec51ebb60..f4f291046c 100644
--- a/github_app_geo_project/scripts/health_check.py
+++ b/github_app_geo_project/scripts/health_check.py
@@ -1,10 +1,10 @@
"""Script used to check the health of the process-queue daemon."""
import argparse
-import os
import subprocess # nosec
import sys
import time
+from pathlib import Path
def main() -> None:
@@ -13,7 +13,7 @@ def main() -> None:
parser.add_argument("--timeout", type=int, help="Timeout in seconds")
args = parser.parse_args()
- blocked_time = time.time() - os.path.getmtime("/var/ghci/watch_dog")
+ blocked_time = time.time() - Path("/var/ghci/watch_dog").stat().st_mtime
if blocked_time > args.timeout / 2:
subprocess.run(["ls", "-l", "/var/ghci/"], check=False) # pylint: disable=subprocess-run-check
diff --git a/github_app_geo_project/scripts/process_queue.py b/github_app_geo_project/scripts/process_queue.py
index 4cde979794..fa4d7fe8b3 100644
--- a/github_app_geo_project/scripts/process_queue.py
+++ b/github_app_geo_project/scripts/process_queue.py
@@ -14,6 +14,7 @@
import sys
import time
import urllib.parse
+from pathlib import Path
from typing import Any, NamedTuple, cast
import c2cwsgiutils.loader
@@ -793,13 +794,14 @@ def __init__(
],
return_when_empty: bool,
max_priority: int,
- ):
+ ) -> None:
self.config = config
self.Session = Session # pylint: disable=invalid-name
self.end_when_empty = return_when_empty
self.max_priority = max_priority
- async def __call__(self, *args: Any, **kwds: Any) -> Any:
+ async def __call__(self, *args: Any, **kwargs: Any) -> Any:
+ del args, kwargs
empty_thread_sleep = int(os.environ.get("GHCI_EMPTY_THREAD_SLEEP", 10))
while True:
@@ -827,11 +829,12 @@ def __init__(
Session: sqlalchemy.orm.sessionmaker[ # pylint: disable=invalid-name,unsubscriptable-object
sqlalchemy.orm.Session
],
- ):
+ ) -> None:
self.Session = Session # pylint: disable=invalid-name
self.last_run = time.time()
- async def __call__(self, *args: Any, **kwds: Any) -> Any:
+ async def __call__(self, *args: Any, **kwargs: Any) -> Any:
+ del args, kwargs
current_task = asyncio.current_task()
if current_task is not None:
current_task.set_name("PrometheusWatch")
@@ -880,7 +883,7 @@ def _watch(self) -> None:
if time.time() - self.last_run > 300:
error_message = ["Old Status"]
- with open("/var/ghci/job_info", encoding="utf-8") as file_:
+ with Path("/var/ghci/job_info").open(encoding="utf-8") as file_:
error_message.extend(file_.read().split("\n"))
error_message.append("-" * 30)
error_message.append("New status")
@@ -890,21 +893,22 @@ def _watch(self) -> None:
_LOGGER.error(message)
self.last_run = time.time()
- with open("/var/ghci/job_info", "w", encoding="utf-8") as file_:
+ with Path("/var/ghci/job_info").open("w", encoding="utf-8") as file_:
file_.write("\n".join(text))
file_.write("\n")
time.sleep(10)
class _WatchDog:
- async def __call__(self, *args: Any, **kwds: Any) -> Any:
+ async def __call__(self, *args: Any, **kwargs: Any) -> Any:
+ del args, kwargs
current_task = asyncio.current_task()
if current_task is not None:
current_task.set_name("WatchDog")
while True:
_LOGGER.debug("Watch dog: alive")
- with open("/var/ghci/watch_dog", "w", encoding="utf-8") as file_:
- file_.write(datetime.datetime.now().isoformat())
+ with Path("/var/ghci/watch_dog").open("w", encoding="utf-8") as file_:
+ file_.write(datetime.datetime.now(datetime.UTC).isoformat())
file_.write("\n")
await asyncio.sleep(60)
diff --git a/github_app_geo_project/security.py b/github_app_geo_project/security.py
index c96881303b..0005550d96 100644
--- a/github_app_geo_project/security.py
+++ b/github_app_geo_project/security.py
@@ -77,7 +77,7 @@ def identity(self, request: pyramid.request.Request) -> User:
our_signature,
request.headers["X-Hub-Signature-256"].split("=", 1)[1],
):
- user = User("github_webhook", None, None, None, True, None, request)
+ user = User("github_webhook", None, None, None, is_auth=True, token=None, request=request)
else:
_LOGGER.warning("Invalid GitHub signature")
_LOGGER.debug(
@@ -107,11 +107,11 @@ def identity(self, request: pyramid.request.Request) -> User:
request,
)
else:
- user = User("anonymous", None, None, None, False, None, request)
+ user = User("anonymous", None, None, None, is_auth=False, token=None, request=request)
request.user = user
- return request.user # type: ignore
+ return request.user # type: ignore[no-any-return]
def authenticated_userid(self, request: pyramid.request.Request) -> str | None:
"""Return a string ID for the user."""
diff --git a/github_app_geo_project/server.py b/github_app_geo_project/server.py
index acacd6313c..aaa63d5daa 100644
--- a/github_app_geo_project/server.py
+++ b/github_app_geo_project/server.py
@@ -53,7 +53,7 @@ def main(global_config: Any, **settings: Any) -> Router:
)
config.include(c2cwsgiutils.pyramid.includeme)
- # dbsession = c2cwsgiutils.db.init(config, "sqlalchemy", "sqlalchemy-slave")
+ # dbsession = c2cwsgiutils.db.init(config, "sqlalchemy", "sqlalchemy-slave") # noqa: ERA001
dbsession = c2cwsgiutils.db.init(config, "sqlalchemy")
health_check = c2cwsgiutils.health_check.HealthCheck(config)
diff --git a/github_app_geo_project/templates/__init__.py b/github_app_geo_project/templates/__init__.py
index 6597dff706..6da165918c 100644
--- a/github_app_geo_project/templates/__init__.py
+++ b/github_app_geo_project/templates/__init__.py
@@ -1,7 +1,7 @@
"""The mako templates to render the pages."""
+import datetime
import logging
-from datetime import UTC, datetime, timedelta
import html_sanitizer
import markdown as markdown_lib # mypy: ignore[import-untyped]
@@ -49,14 +49,14 @@ def markdown(text: str) -> str:
return sanitizer(markdown_lib.markdown(text))
-def pprint_short_date(date_in: str | datetime) -> str:
+def pprint_short_date(date_in: str | datetime.datetime) -> str:
"""Pretty print a short date (essentially time to current time)."""
if date_in == "None" or date_in is None:
return "-"
- date = datetime.fromisoformat(date_in) if isinstance(date_in, str) else date_in
+ date = datetime.datetime.fromisoformat(date_in) if isinstance(date_in, str) else date_in
- delta = datetime.now(UTC) - date
+ delta = datetime.datetime.now(datetime.UTC) - date
if delta.total_seconds() < 1:
short_date = "now"
elif delta.total_seconds() < 60:
@@ -73,16 +73,16 @@ def pprint_short_date(date_in: str | datetime) -> str:
return short_date
-def pprint_full_date(date_in: str | datetime) -> str:
+def pprint_full_date(date_in: str | datetime.datetime) -> str:
"""Pretty print a full date."""
if date_in == "None" or date_in is None:
return "-"
- date = datetime.fromisoformat(date_in) if isinstance(date_in, str) else date_in
- return datetime.strftime(date, "%Y-%m-%d %H:%M:%S")
+ date = datetime.datetime.fromisoformat(date_in) if isinstance(date_in, str) else date_in
+ return datetime.datetime.strftime(date, "%Y-%m-%d %H:%M:%S")
-def pprint_date(date_in: str | datetime) -> str:
+def pprint_date(date_in: str | datetime.datetime) -> str:
"""
Pretty print a date.
@@ -97,7 +97,7 @@ def pprint_date(date_in: str | datetime) -> str:
return f'{short_date}'
-def pprint_duration(duration_in: str | timedelta) -> str:
+def pprint_duration(duration_in: str | datetime.timedelta) -> str:
"""Pretty print a duration."""
if duration_in == "None" or duration_in is None:
return "-"
@@ -108,11 +108,21 @@ def pprint_duration(duration_in: str | timedelta) -> str:
day_, duration_in = duration_in.split(day_txt)
day = int(day_)
- date = datetime.strptime(duration_in, "%H:%M:%S.%f" if "." in duration_in else "%H:%M:%S")
+ date = datetime.datetime.strptime(
+ duration_in,
+ "%H:%M:%S.%f" if "." in duration_in else "%H:%M:%S",
+ ).replace(
+ tzinfo=datetime.UTC,
+ )
else:
day = 0
- date = datetime.strptime(duration_in, "%H:%M:%S.%f" if "." in duration_in else "%H:%M:%S")
- duration = timedelta(
+ date = datetime.datetime.strptime(
+ duration_in,
+ "%H:%M:%S.%f" if "." in duration_in else "%H:%M:%S",
+ ).replace(
+ tzinfo=datetime.UTC,
+ )
+ duration = datetime.timedelta(
days=day,
hours=date.hour,
minutes=date.minute,
diff --git a/github_app_geo_project/utils.py b/github_app_geo_project/utils.py
index 199f332934..571c240bff 100644
--- a/github_app_geo_project/utils.py
+++ b/github_app_geo_project/utils.py
@@ -14,9 +14,9 @@
_ISSUE_END = ""
-_JSON_LEXER = pygments.lexers.JsonLexer()
-_YAML_LEXER = pygments.lexers.YamlLexer()
-_HTML_FORMATTER = pygments.formatters.HtmlFormatter(noclasses=True, style="github-dark")
+_JSON_LEXER = pygments.lexers.JsonLexer() # pylint: disable=no-member
+_YAML_LEXER = pygments.lexers.YamlLexer() # pylint: disable=no-member
+_HTML_FORMATTER = pygments.formatters.HtmlFormatter(noclasses=True, style="github-dark") # pylint: disable=no-member
def get_dashboard_issue_module(text: str, current_module: str) -> str:
@@ -100,4 +100,5 @@ def parse_duration(text: str) -> datetime.timedelta:
return datetime.timedelta(minutes=int(text[:-1]))
if text.endswith("h"):
return datetime.timedelta(hours=int(text[:-1]))
- raise ValueError(f"Invalid time delta: {text}")
+ message = f"Invalid time delta: {text}"
+ raise ValueError(message)
diff --git a/github_app_geo_project/views/dashboard.py b/github_app_geo_project/views/dashboard.py
index 7f43045ccc..efab90dfe1 100644
--- a/github_app_geo_project/views/dashboard.py
+++ b/github_app_geo_project/views/dashboard.py
@@ -18,7 +18,7 @@
_LOGGER = logging.getLogger(__name__)
-@view_config(route_name="dashboard", renderer="github_app_geo_project:templates/dashboard.html") # type: ignore
+@view_config(route_name="dashboard", renderer="github_app_geo_project:templates/dashboard.html") # type: ignore[misc]
def dashboard(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the dashboard."""
repository = os.environ["C2C_AUTH_GITHUB_REPOSITORY"]
@@ -29,11 +29,13 @@ def dashboard(request: pyramid.request.Request) -> dict[str, Any]:
admin = isinstance(user_permission, pyramid.security.Allowed)
if not admin:
- raise pyramid.httpexceptions.HTTPForbidden("You are not allowed to access this page")
+ message = "You are not allowed to access this page"
+ raise pyramid.httpexceptions.HTTPForbidden(message)
module_name = request.matchdict["module"]
if module_name not in modules.MODULES:
- raise pyramid.httpexceptions.HTTPNotFound(f"The module {module_name} does not exist")
+ message = f"The module {module_name} does not exist"
+ raise pyramid.httpexceptions.HTTPNotFound(message)
module_instance = modules.MODULES[module_name]
session_factory = request.registry["dbsession_factory"]
diff --git a/github_app_geo_project/views/home.py b/github_app_geo_project/views/home.py
index 79ef5e0cf2..f0b4cd7951 100644
--- a/github_app_geo_project/views/home.py
+++ b/github_app_geo_project/views/home.py
@@ -24,7 +24,7 @@ def _gt_access(
return access_number[access_1] > access_number[access_2]
-@view_config(route_name="home", renderer="github_app_geo_project.templates:home.html") # type: ignore
+@view_config(route_name="home", renderer="github_app_geo_project.templates:home.html") # type: ignore[misc]
def output(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the welcome page."""
repository = os.environ["C2C_AUTH_GITHUB_REPOSITORY"]
@@ -162,7 +162,7 @@ def output(request: pyramid.request.Request) -> dict[str, Any]:
_LOGGER.error(application["errors"][-1])
except Exception as exception: # pylint: disable=broad-exception-caught
application["errors"].append(str(exception))
- _LOGGER.error(application["errors"][-1], exception)
+ _LOGGER.error(application["errors"][-1], exception) # noqa: TRY400
applications.append(application)
diff --git a/github_app_geo_project/views/logs.py b/github_app_geo_project/views/logs.py
index 4da7d8ceb0..8b79ae8f1d 100644
--- a/github_app_geo_project/views/logs.py
+++ b/github_app_geo_project/views/logs.py
@@ -15,11 +15,11 @@
_LOGGER = logging.getLogger(__name__)
-@view_config(route_name="logs", renderer="github_app_geo_project:templates/logs.html") # type: ignore
+@view_config(route_name="logs", renderer="github_app_geo_project:templates/logs.html") # type: ignore[misc]
def logs_view(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the logs of a job."""
if not request.is_authenticated:
- raise pyramid.httpexceptions.HTTPForbidden()
+ raise pyramid.httpexceptions.HTTPForbidden
title = f"Logs of job {request.matchdict['id']}"
logs = "Element not found"
@@ -40,7 +40,7 @@ def logs_view(request: pyramid.request.Request) -> dict[str, Any]:
if has_access:
logs = job.log
else:
- raise pyramid.httpexceptions.HTTPUnauthorized()
+ raise pyramid.httpexceptions.HTTPUnauthorized
return {
"title": title,
"logs": logs,
@@ -52,4 +52,4 @@ def logs_view(request: pyramid.request.Request) -> dict[str, Any]:
else ("green" if job.status == models.JobStatus.DONE else "blue")
),
}
- raise pyramid.httpexceptions.HTTPNotFound()
+ raise pyramid.httpexceptions.HTTPNotFound
diff --git a/github_app_geo_project/views/output.py b/github_app_geo_project/views/output.py
index ea9f82dd49..ab356617ca 100644
--- a/github_app_geo_project/views/output.py
+++ b/github_app_geo_project/views/output.py
@@ -15,7 +15,7 @@
_LOGGER = logging.getLogger(__name__)
-@view_config(route_name="output", renderer="github_app_geo_project:templates/output.html") # type: ignore
+@view_config(route_name="output", renderer="github_app_geo_project:templates/output.html") # type: ignore[misc]
def output(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the output of a job."""
title = request.matchdict["id"]
diff --git a/github_app_geo_project/views/project.py b/github_app_geo_project/views/project.py
index a0897a3d71..d1f5d18494 100644
--- a/github_app_geo_project/views/project.py
+++ b/github_app_geo_project/views/project.py
@@ -37,7 +37,7 @@ def _date_tooltip(job: models.Queue) -> str:
return f"created: {_pprint_date(created)}\nstarted: {_pprint_date(started)}\nduration: {pprint_duration(finished - started)}"
-@view_config(route_name="project", renderer="github_app_geo_project:templates/project.html") # type: ignore
+@view_config(route_name="project", renderer="github_app_geo_project:templates/project.html") # type: ignore[misc]
def project(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the output of a job."""
owner = request.matchdict["owner"]
diff --git a/github_app_geo_project/views/schema.py b/github_app_geo_project/views/schema.py
index c34e05e46d..d345014c51 100644
--- a/github_app_geo_project/views/schema.py
+++ b/github_app_geo_project/views/schema.py
@@ -2,7 +2,7 @@
import json
import logging
-import os.path
+from pathlib import Path
from typing import Any
import pyramid.httpexceptions
@@ -16,7 +16,7 @@
_LOGGER = logging.getLogger(__name__)
-@view_config(route_name="schema", renderer="json") # type: ignore
+@view_config(route_name="schema", renderer="json") # type: ignore[misc]
def schema_view(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the welcome page."""
module_names = set()
@@ -24,10 +24,8 @@ def schema_view(request: pyramid.request.Request) -> dict[str, Any]:
module_names.update(request.registry.settings[f"application.{app}.modules"].split())
# get project-schema-content
- with open(
- os.path.join(os.path.dirname(os.path.dirname(__file__)), "project-schema.json"),
- encoding="utf-8",
- ) as schema_file:
+ schema_path = Path(__file__).parent.parent / "project-schema.json"
+ with schema_path.open(encoding="utf-8") as schema_file:
schema: dict[str, Any] = json.loads(schema_file.read())
del schema["properties"]["module-configuration"]
diff --git a/github_app_geo_project/views/webhook.py b/github_app_geo_project/views/webhook.py
index 0fa91085d0..1457580d4d 100644
--- a/github_app_geo_project/views/webhook.py
+++ b/github_app_geo_project/views/webhook.py
@@ -22,7 +22,7 @@
# curl -X POST http://localhost:9120/webhook/generic -d '{"repository":{"full_name": "sbrunner/test-github-app"}}'
-@view_config(route_name="webhook", renderer="json") # type: ignore
+@view_config(route_name="webhook", renderer="json") # type: ignore[misc]
def webhook(request: pyramid.request.Request) -> dict[str, None]:
"""Receive GitHub application webhook URL."""
application = request.matchdict["application"]
@@ -34,7 +34,8 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
if "X-Hub-Signature-256" not in request.headers:
_LOGGER.error("No signature in the request")
if not dry_run:
- raise pyramid.httpexceptions.HTTPBadRequest("No signature in the request")
+ message = "No signature in the request"
+ raise pyramid.httpexceptions.HTTPBadRequest(message)
else:
our_signature = hmac.new(
@@ -48,7 +49,8 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
):
_LOGGER.error("Invalid signature in the request")
if not dry_run:
- raise pyramid.httpexceptions.HTTPBadRequest("Invalid signature in the request")
+ message = "Invalid signature in the request"
+ raise pyramid.httpexceptions.HTTPBadRequest(message)
_LOGGER.debug(
"Webhook received for %s on %s",
@@ -128,7 +130,7 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
check_run.edit(status="queued")
except github.GithubException as exception:
if exception.status == 404:
- _LOGGER.error("Repository not found: %s/%s", owner, repository)
+ _LOGGER.error("Repository not found: %s/%s", owner, repository) # noqa: TRY400
else:
_LOGGER.exception("Error while getting check suite")
@@ -156,7 +158,7 @@ def webhook(request: pyramid.request.Request) -> dict[str, None]:
project_github.repo.get_check_run(data["check_run"]["id"]).edit(status="queued")
except github.GithubException as exception:
if exception.status == 404:
- _LOGGER.error("Repository not found: %s/%s", owner, repository)
+ _LOGGER.error("Repository not found: %s/%s", owner, repository) # noqa: TRY400
else:
_LOGGER.exception("Error while getting check run")
@@ -329,8 +331,8 @@ def process_event(context: ProcessContext) -> None:
)
context.session.commit()
- except Exception as exception: # pylint: disable=broad-except
- _LOGGER.exception("Error while getting actions for %s: %s", name, exception)
+ except Exception: # pylint: disable=broad-except
+ _LOGGER.exception("Error while getting actions for %s", name)
def create_checks(
diff --git a/github_app_geo_project/views/welcome.py b/github_app_geo_project/views/welcome.py
index a36ea47fe7..e3ccee32bf 100644
--- a/github_app_geo_project/views/welcome.py
+++ b/github_app_geo_project/views/welcome.py
@@ -14,7 +14,7 @@
_LOGGER = logging.getLogger(__name__)
-@view_config(route_name="welcome", renderer="github_app_geo_project:templates/welcome.html") # type: ignore
+@view_config(route_name="welcome", renderer="github_app_geo_project:templates/welcome.html") # type: ignore[misc]
def output(request: pyramid.request.Request) -> dict[str, Any]:
"""Get the welcome page."""
del request # Unused
diff --git a/poetry.lock b/poetry.lock
index 7e93519be8..f914d6ea15 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -189,6 +189,28 @@ files = [
docs = ["mkdocs", "mkdocs-material", "mkdocs-material-extensions", "mkdocstrings", "mkdocstrings-python", "pymdown-extensions"]
test = ["pytest", "pytest-cov"]
+[[package]]
+name = "anyio"
+version = "4.8.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+ {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"},
+ {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""}
+
+[package.extras]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"]
+trio = ["trio (>=0.26.1)"]
+
[[package]]
name = "applications-download"
version = "0.8.0"
@@ -3871,6 +3893,18 @@ files = [
{file = "smmap-5.0.2.tar.gz", hash = "sha256:26ea65a03958fa0c8a1c7e8c7a58fdc77221b8910f6be2131affade476898ad5"},
]
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
[[package]]
name = "snowballstemmer"
version = "2.2.0"
@@ -4718,4 +4752,4 @@ test = ["zope.testing"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.13"
-content-hash = "0063c902e32680a10a849cf8df7f78119f2e62edc47d4764540efacac5f555f7"
+content-hash = "24e5c8a258d56720924924590aedfdecb2dd7bb3f848aea0be3fbbf6765bb70f"
diff --git a/pyproject.toml b/pyproject.toml
index 642bcb5815..d2bd9ad437 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,6 +82,7 @@ waitress = "3.0.2"
lxml-html-clean = "0.4.1"
tag-publish = "0.13.3"
aiohttp = "3.11.11"
+anyio = "4.8.0"
[tool.poetry.group.dev.dependencies]
c2cwsgiutils = { version = "6.1.7", extras = ["test-images"] }