diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml
new file mode 100644
index 0000000..c6fdb00
--- /dev/null
+++ b/.github/workflows/pylint.yml
@@ -0,0 +1,26 @@
+name: Pylint Check
+
+on: [push, pull_request]
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Check out the code
+        uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install pylint
+          pip install -r requirements.txt
+
+      - name: Run Pylint
+        run: |
+          pylint --disable=C0301,C0103,C0114,C0116,R0913,R0914,W1514,R0911,R0912,R1702,R0902,R0915,R0903,W0511,R0917,W0719,W0707 $(find . -name "*.py")
diff --git a/kartograf/collectors/routeviews.py b/kartograf/collectors/routeviews.py
index b341600..8784337 100644
--- a/kartograf/collectors/routeviews.py
+++ b/kartograf/collectors/routeviews.py
@@ -21,7 +21,7 @@ def latest_link(base):
     url = base + ym
 
     try:
-        response = requests.get(url)
+        response = requests.get(url, timeout=600)
         response.raise_for_status()
     except requests.exceptions.HTTPError:
         print(f"The page at {url} couldn't be fetched. "
@@ -32,7 +32,7 @@ def latest_link(base):
         url = base + ym
 
         try:
-            response = requests.get(url)
+            response = requests.get(url, timeout=600)
             response.raise_for_status()
         except requests.exceptions.HTTPError:
             print(f"The page at {url} couldn't be fetched. "
@@ -42,6 +42,7 @@ def latest_link(base):
 
     soup = BeautifulSoup(response.text, 'html.parser')
     links = [a["href"] for a in soup.find_all("a", href=True)]
 
+    latest = ""
     for link in links:
         if link.endswith(".pfx2as.gz"):
diff --git a/kartograf/irr/parse.py b/kartograf/irr/parse.py
index bffc5c8..16e057b 100644
--- a/kartograf/irr/parse.py
+++ b/kartograf/irr/parse.py
@@ -33,13 +33,13 @@ def parse_irr(context):
         lines = f.readlines()
 
     entry_list = []
-    current_entry = dict()
+    current_entry = {}
 
     # Parse the RPSL objects in the IRR DB into Python Dicts
     for line in lines:
         if line == '\n':
             entry_list.append(current_entry)
-            current_entry = dict()
+            current_entry = {}
         else:
             if ":" in line:
                 k, v = line.strip().split(':', 1)
diff --git a/kartograf/kartograf.py b/kartograf/kartograf.py
index 36a3e9c..049f5ea 100644
--- a/kartograf/kartograf.py
+++ b/kartograf/kartograf.py
@@ -3,7 +3,6 @@
 import shutil
 import time
 
-from . import __version__
 from kartograf.context import Context
 from kartograf.coverage import coverage
 from kartograf.collectors.routeviews import extract_routeviews_pfx2as, fetch_routeviews_pfx2as
@@ -21,8 +20,11 @@
     wait_for_launch
 )
 
+from . import __version__
 
 class Kartograf:
+    ''' Top level project class. '''
+
     @staticmethod
     def map(args):
         print_section_header("Start Kartograf")
diff --git a/kartograf/rpki/fetch.py b/kartograf/rpki/fetch.py
index 29f4bdd..fa3eea6 100644
--- a/kartograf/rpki/fetch.py
+++ b/kartograf/rpki/fetch.py
@@ -1,9 +1,11 @@
+import subprocess
+import sys
+
 from concurrent.futures import ThreadPoolExecutor
 from threading import Lock
 import os
 import pathlib
 import requests
-import subprocess
 from tqdm import tqdm
 
 from kartograf.timed import timed
@@ -26,7 +28,7 @@ def download_rir_tals(context):
 
     for rir, url in TAL_URLS.items():
         try:
-            response = requests.get(url)
+            response = requests.get(url, timeout=600)
             response.raise_for_status()
 
             tal_path = os.path.join(context.data_dir_rpki_tals, f"{rir}.tal")
@@ -38,17 +40,17 @@ def download_rir_tals(context):
         except requests.RequestException as e:
             print(f"Error downloading TAL for {rir.upper()}: {e}")
-            exit(1)
+            sys.exit(1)
 
 
 def data_tals(context):
-    tal_paths = [path for path in pathlib.Path(context.data_dir_rpki_tals).rglob('*.tal')]
+    tal_paths = list(pathlib.Path(context.data_dir_rpki_tals).rglob('*.tal'))
 
     # We need to have 5 TALs, one from each RIR
     if len(tal_paths) == 5:
         return tal_paths
-    else:
-        print("Not all 5 TALs could be downloaded.")
-        exit(1)
+
+    print("Not all 5 TALs could be downloaded.")
+    sys.exit(1)
 
 
 @timed
@@ -65,12 +67,14 @@ def fetch_rpki_db(context):
                 "-d", context.data_dir_rpki_cache
                 ] + tal_options,
                 stdout=logs,
-                stderr=logs)
+                stderr=logs,
+                check=False)
     else:
         subprocess.run(["rpki-client",
                         "-d", context.data_dir_rpki_cache
                         ] + tal_options,
-                       capture_output=True)
+                       capture_output=True,
+                       check=False)
 
     print(f"Downloaded RPKI Data, hash sum: {calculate_sha256_directory(context.data_dir_rpki_cache)}")
 
@@ -104,7 +108,8 @@ def process_file(file):
                                      context.epoch,
                                      ] + tal_options + ["-f", file],  # -f has to be last
-                                     capture_output=True)
+                                     capture_output=True,
+                                     check=False)
 
         if result.stderr and context.debug_log:
             stderr_output = result.stderr.decode()
diff --git a/kartograf/util.py b/kartograf/util.py
index b1259bf..1251fd5 100644
--- a/kartograf/util.py
+++ b/kartograf/util.py
@@ -1,3 +1,4 @@
+from functools import partial
 import hashlib
 import ipaddress
 import os
@@ -19,11 +20,12 @@ def calculate_sha256(file_path):
 
 
 def calculate_sha256_directory(directory_path):
     sha256_hash = hashlib.sha256()
-    for root, dirs, files in os.walk(directory_path):
+    for root, _dirs, files in os.walk(directory_path):
         for file in sorted(files):
             file_path = os.path.join(root, file)
             with open(file_path, "rb") as f:
-                for byte_block in iter(lambda: f.read(4096), b""):
+                read_block = partial(f.read, 4096)
+                for byte_block in iter(read_block, b""):
                     sha256_hash.update(byte_block)
     return sha256_hash.hexdigest()
@@ -72,7 +74,8 @@ def check_compatibility():
     if version_number < 8.4:
         raise Exception("Error: rpki-client version 8.4 or higher is "
                         "required.")
-    elif version_number == latest_version:
+
+    if version_number == latest_version:
         print(f"Using rpki-client version {version} (recommended).")
     elif version_number > latest_version:
         print("Warning: This kartograf version has not been tested with "
@@ -121,7 +124,6 @@ def format_pfx(pfx):
         if '/' in pfx:
             formatted_pfx = str(ipaddress.ip_network(pfx))
             return f"{formatted_pfx}"
-        else:
-            return str(ipaddress.ip_address(pfx))
+        return str(ipaddress.ip_address(pfx))
     except ValueError:
         return pfx
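
For reference, the chunked-read hashing pattern that calculate_sha256_directory adopts in this patch (a functools.partial reader passed to iter() instead of a lambda, which also satisfies pylint's W0640/cell-var style concerns) can be exercised on its own. The sketch below is a minimal standalone illustration, not part of the patch; the function name sha256_of_file and the input path "example.bin" are hypothetical.

# Standalone sketch of the partial/iter chunked-hashing pattern used above.
# sha256_of_file and "example.bin" are illustrative names, not kartograf code.
import hashlib
from functools import partial

def sha256_of_file(file_path, block_size=4096):
    sha256_hash = hashlib.sha256()
    with open(file_path, "rb") as f:
        read_block = partial(f.read, block_size)   # replaces: lambda: f.read(4096)
        for byte_block in iter(read_block, b""):   # call read_block until it returns b"" at EOF
            sha256_hash.update(byte_block)
    return sha256_hash.hexdigest()

print(sha256_of_file("example.bin"))  # hypothetical input file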