Skip to content

Commit

Permalink
Fix some of the new Prospector issues
Browse files Browse the repository at this point in the history
  • Loading branch information
sbrunner committed Feb 21, 2025
1 parent 38112bc commit efd59e1
Show file tree
Hide file tree
Showing 32 changed files with 227 additions and 167 deletions.
2 changes: 1 addition & 1 deletion github_app_geo_project/module/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,7 +363,7 @@ def cleanup(self, context: CleanupContext[_EVENT_DATA]) -> None:
del context

@abstractmethod
def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
super_ = [c for c in self.__class__.__orig_bases__ if c.__origin__ == Module][0] # type: ignore[attr-defined] # pylint: disable=no-member
generic_element = super_.__args__[0]
Expand Down
27 changes: 15 additions & 12 deletions github_app_geo_project/module/audit/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import asyncio
import datetime
import glob
import json
import logging
import os
Expand All @@ -11,6 +10,7 @@
import subprocess # nosec
import tempfile
import urllib.parse
from pathlib import Path
from typing import Any, cast

import github
Expand Down Expand Up @@ -157,8 +157,9 @@ async def _process_snyk_dpkg(
logs_url = urllib.parse.urljoin(context.service_url, f"logs/{context.job_id}")
if context.module_event_data.type == "snyk":
python_version = ""
if os.path.exists(".tool-versions"):
with open(".tool-versions", encoding="utf-8") as file:
tool_versions = Path(".tool-versions")
if tool_versions.exists():
with tool_versions.open(encoding="utf-8") as file:
for line in file:
if line.startswith("python "):
python_version = ".".join(line.split(" ")[1].split(".")[0:2]).strip()
Expand Down Expand Up @@ -197,8 +198,11 @@ async def _process_snyk_dpkg(
if context.module_event_data.type == "dpkg":
body_md = "Update dpkg packages"

if os.path.exists("ci/dpkg-versions.yaml") or os.path.exists(
".github/dpkg-versions.yaml",
if (
Path("ci/dpkg-versions.yaml").exists()
or Path(
".github/dpkg-versions.yaml",
).exists()
):
await audit_utils.dpkg(
context.module_config.get("dpkg", {}),
Expand Down Expand Up @@ -339,7 +343,7 @@ async def _use_python_version(python_version: str) -> dict[str, str]:

# Get path from /pyenv/versions/{python_version}.*/bin/
env = os.environ.copy()
bin_paths = glob.glob(f"/pyenv/versions/{python_version}.*/bin")
bin_paths = sorted(Path("/pyenv/versions/").glob(f"{python_version}.*/bin"))
if bin_paths:
env["PATH"] = f'{bin_paths[0]}:{env["PATH"]}'

Expand Down Expand Up @@ -464,12 +468,12 @@ async def process(
raise
if security_file is not None:
key_starts.append(_OUTDATED)
issue_check.add_check("outdated", "Check outdated version", False)
issue_check.add_check("outdated", "Check outdated version", checked=False)
else:
issue_check.remove_check("outdated")

if context.module_config.get("snyk", {}).get("enabled", configuration.ENABLE_SNYK_DEFAULT):
issue_check.add_check("snyk", "Check security vulnerabilities with Snyk", False)
issue_check.add_check("snyk", "Check security vulnerabilities with Snyk", checked=False)
key_starts.append("Snyk check/fix ")
else:
issue_check.remove_check("snyk")
Expand All @@ -492,7 +496,7 @@ async def process(
context.module_config.get("dpkg", {}).get("enabled", configuration.ENABLE_DPKG_DEFAULT)
and dpkg_version is not None
):
issue_check.add_check("dpkg", "Update dpkg packages", False)
issue_check.add_check("dpkg", "Update dpkg packages", checked=False)
key_starts.append("Dpkg ")
else:
issue_check.remove_check("dpkg")
Expand Down Expand Up @@ -563,9 +567,9 @@ async def process(

return _get_process_output(context, issue_check, short_message, success)

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema of the module configuration."""
with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("audit") # type: ignore[no-any-return]

def get_github_application_permissions(self) -> module.GitHubApplicationPermissions:
Expand Down Expand Up @@ -599,7 +603,6 @@ def get_transversal_dashboard(
},
)
return module.TransversalDashboardOutput(
# template="dashboard.html",
renderer="github_app_geo_project:module/audit/dashboard.html",
data={"repositories": repositories},
)
41 changes: 23 additions & 18 deletions github_app_geo_project/module/audit/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import logging
import os.path
import subprocess
from pathlib import Path
from typing import NamedTuple

import apt_repo
Expand Down Expand Up @@ -297,7 +298,7 @@ async def _install_poetry_dependencies(
f"Dependencies installed from {file}",
f"Error while installing the dependencies from {file}",
f"Timeout while installing the dependencies from {file}",
os.path.dirname(os.path.abspath(file)),
Path(file).resolve().parent,
)
if proc_message is not None:
result.append(proc_message)
Expand Down Expand Up @@ -578,7 +579,7 @@ async def _snyk_fix(
)

cwd = module_utils.get_cwd()
project = "-" if cwd is None else os.path.basename(cwd)
project = "-" if cwd is None else Path(cwd).name
message = module_utils.HtmlMessage(
"<br>\n".join(
[
Expand All @@ -601,7 +602,7 @@ async def _npm_audit_fix(
messages: set[str] = set()
fix_success = True
for package_lock_file_name, file_messages in fixable_files_npm.items():
directory = os.path.dirname(os.path.abspath(package_lock_file_name))
directory = Path(package_lock_file_name).absolute().parent
messages.update(file_messages)
_LOGGER.debug("Fixing vulnerabilities in %s with npm audit fix", package_lock_file_name)
command = ["npm", "audit", "fix"]
Expand All @@ -612,19 +613,19 @@ async def _npm_audit_fix(
"Npm audit fix",
"Error while fixing the project",
"Timeout while fixing the project",
directory,
str(directory),
)
if message is not None:
result.append(message)
_LOGGER.debug("Fixing version in %s", package_lock_file_name)
# Remove the add '~' in the version in the package.json
with open(os.path.join(directory, "package.json"), encoding="utf-8") as package_file:
with (directory / "package.json").open(encoding="utf-8") as package_file:
package_json = json.load(package_file)
for dependencies_type in ("dependencies", "devDependencies"):
for package, version in package_json.get(dependencies_type, {}).items():
if version.startswith("^"):
package_json[dependencies_type][package] = version[1:]
with open(os.path.join(directory, "package.json"), "w", encoding="utf-8") as package_file:
with (directory / "package.json").open("w", encoding="utf-8") as package_file:
json.dump(package_json, package_file, indent=2)
_LOGGER.debug("Succeeded fix %s", package_lock_file_name)

Expand All @@ -644,8 +645,8 @@ def outdated_versions(
for row in security.data:
str_date = row[date_index]
if str_date not in ("Unsupported", "Best effort", "To be defined"):
date = datetime.datetime.strptime(row[date_index], "%d/%m/%Y")
if date < datetime.datetime.now():
date = datetime.datetime.strptime(row[date_index], "%d/%m/%Y").replace(tzinfo=datetime.UTC)
if date < datetime.datetime.now(datetime.UTC):
errors.append(
f"The version '{row[version_index]}' is outdated, it can be set to "
"'Unsupported', 'Best effort' or 'To be defined'",
Expand All @@ -667,7 +668,8 @@ def _get_sources(
if dist not in _SOURCES:
conf = local_config.get("sources", config.get("sources", configuration.DPKG_SOURCES_DEFAULT))
if dist not in conf:
raise ValueError(f"The distribution {dist} is not in the configuration")
message = f"The distribution {dist} is not in the configuration"
raise ValueError(message)
_SOURCES[dist] = apt_repo.APTSources(
[
apt_repo.APTRepository(
Expand All @@ -694,7 +696,7 @@ def _get_sources(
exception,
)
except AttributeError as exception:
_LOGGER.error("Error while loading the distribution %s: %s", dist, exception)
_LOGGER.error("Error while loading the distribution %s: %s", dist, exception) # noqa: TRY400

return _SOURCES[dist]

Expand All @@ -708,12 +710,13 @@ async def _get_packages_version(
global _GENERATION_TIME # pylint: disable=global-statement
if (
_GENERATION_TIME is None
or datetime.datetime.now() - utils.parse_duration(os.environ.get("GHCI_DPKG_CACHE_DURATION", "3h"))
or datetime.datetime.now(datetime.UTC)
- utils.parse_duration(os.environ.get("GHCI_DPKG_CACHE_DURATION", "3h"))
> _GENERATION_TIME
):
_PACKAGE_VERSION.clear()
_SOURCES.clear()
_GENERATION_TIME = datetime.datetime.now()
_GENERATION_TIME = datetime.datetime.now(datetime.UTC)
if package not in _PACKAGE_VERSION:
dist = package.split("/")[0]
await asyncio.to_thread(_get_sources, dist, config, local_config)
Expand All @@ -727,15 +730,17 @@ async def dpkg(
local_config: configuration.DpkgConfiguration,
) -> None:
"""Update the version of packages in the file .github/dpkg-versions.yaml or ci/dpkg-versions.yaml."""
if not os.path.exists("ci/dpkg-versions.yaml") and not os.path.exists(".github/dpkg-versions.yaml"):
ci_dpkg_versions_filename = Path("ci/dpkg-versions.yaml")
github_dpkg_versions_filename = Path(".github/dpkg-versions.yaml")

if not ci_dpkg_versions_filename.exists() and not github_dpkg_versions_filename.exists():
_LOGGER.warning("The file .github/dpkg-versions.yaml or ci/dpkg-versions.yaml does not exist")

dpkg_versions_filename = (
".github/dpkg-versions.yaml"
if os.path.exists(".github/dpkg-versions.yaml")
else "ci/dpkg-versions.yaml"
github_dpkg_versions_filename if github_dpkg_versions_filename.exists() else ci_dpkg_versions_filename
)
with open(dpkg_versions_filename, encoding="utf-8") as versions_file:

with dpkg_versions_filename.open(encoding="utf-8") as versions_file:
versions_config = yaml.load(versions_file, Loader=yaml.SafeLoader)
for versions in versions_config.values():
for package_full in versions:
Expand Down Expand Up @@ -768,5 +773,5 @@ async def dpkg(
exception,
)

with open(dpkg_versions_filename, "w", encoding="utf-8") as versions_file:
with dpkg_versions_filename.open("w", encoding="utf-8") as versions_file:
yaml.dump(versions_config, versions_file, Dumper=yaml.SafeDumper)
7 changes: 4 additions & 3 deletions github_app_geo_project/module/backport/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import os.path
import subprocess # nosec
import tempfile
from pathlib import Path
from typing import Any

import github
Expand Down Expand Up @@ -56,9 +57,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request", "push"},
)

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module."""
with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("backport") # type: ignore[no-any-return]

def get_actions(self, context: module.GetActionContext) -> list[module.Action[_ActionData]]:
Expand Down Expand Up @@ -285,7 +286,7 @@ async def _backport(
f"git push origin {backport_branch} --force",
],
)
with open("BACKPORT_TODO", "w", encoding="utf-8") as f:
with Path("BACKPORT_TODO").open("w", encoding="utf-8") as f:
f.write("\n".join(message))
command = ["git", "add", "BACKPORT_TODO"]
proc = await asyncio.create_subprocess_exec(*command)
Expand Down
9 changes: 5 additions & 4 deletions github_app_geo_project/module/clean/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import os.path
import subprocess # nosec
import tempfile
from pathlib import Path
from typing import Any, cast

import aiohttp
Expand Down Expand Up @@ -52,9 +53,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request", "delete"},
)

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module."""
with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
return json.loads(schema_file.read()).get("properties", {}).get("clean") # type: ignore[no-any-return]

def get_actions(self, context: module.GetActionContext) -> list[module.Action[_ActionData]]:
Expand Down Expand Up @@ -130,7 +131,7 @@ async def _clean_docker(
),
),
)
name = tag_publish.get_value(*pull_match)
name = tag_publish.get_value(*pull_match) # noqa: PLW2901

for repo in (
publish_config.get("docker", {})
Expand All @@ -154,7 +155,7 @@ async def _clean_docker(
continue
for image in publish_config.get("docker", {}).get("images", []):
for tag in image.get("tags", []):
tag = tag.format(version=name)
tag = tag.format(version=name) # noqa: PLW2901
_LOGGER.info("Cleaning %s/%s:%s", host, image["name"], tag)

if host == "docker.io":
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import datetime
import json
import logging
import os.path
from pathlib import Path
from typing import Any

from github_app_geo_project import module
Expand Down Expand Up @@ -42,9 +42,9 @@ def get_actions(self, context: module.GetActionContext) -> list[module.Action[di
return [module.Action(data={})]
return []

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the module configuration."""
with open(os.path.join(os.path.dirname(__file__), "schema.json"), encoding="utf-8") as schema_file:
with (Path(__file__).parent / "schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
Expand Down Expand Up @@ -82,7 +82,7 @@ async def process(
status = rule.get("status")

arguments = {
"created": f"<{datetime.datetime.now() - datetime.timedelta(days=older_than_days):%Y-%m-%d}",
"created": f"<{datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=older_than_days):%Y-%m-%d}",
}
if actor:
arguments["actor"] = actor
Expand Down
9 changes: 3 additions & 6 deletions github_app_geo_project/module/pull_request/checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@
import asyncio
import json
import logging
import os
import re
import tempfile
import typing
from pathlib import Path
from typing import Any

import github
Expand Down Expand Up @@ -279,12 +279,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request"},
)

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the configuration."""
with open(
os.path.join(os.path.dirname(__file__), "checks-schema.json"),
encoding="utf-8",
) as schema_file:
with (Path(__file__).parent / "checks-schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
Expand Down
9 changes: 3 additions & 6 deletions github_app_geo_project/module/pull_request/links.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
"""Module that adds some links to the pull request message."""

import json
import os
import re
from pathlib import Path
from typing import Any

import github
Expand Down Expand Up @@ -100,12 +100,9 @@ def get_github_application_permissions(self) -> module.GitHubApplicationPermissi
{"pull_request"},
)

def get_json_schema(self) -> dict[str, Any]:
async def get_json_schema(self) -> dict[str, Any]:
"""Get the JSON schema for the configuration."""
with open(
os.path.join(os.path.dirname(__file__), "links-schema.json"),
encoding="utf-8",
) as schema_file:
with (Path(__file__).parent / "links-schema.json").open(encoding="utf-8") as schema_file:
schema = json.loads(schema_file.read())
for key in ("$schema", "$id"):
if key in schema:
Expand Down
Loading

0 comments on commit efd59e1

Please sign in to comment.