diff --git a/.flake8 b/.flake8
index f6ed7bb9..29425516 100644
--- a/.flake8
+++ b/.flake8
@@ -2,6 +2,9 @@
 exclude =
   .flake8
   .git
-ignore = E722, # ignore bare excepts until I can get around to fixing them
-  W504 # line break after binary operator
-max-line-length = 119
+extend-ignore =
+  # black compatibility
+  E203, E704,
+  # ignore bare excepts until I can get around to fixing them
+  E722,
+max-line-length = 120
diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
new file mode 100644
index 00000000..8ad038dd
--- /dev/null
+++ b/.git-blame-ignore-revs
@@ -0,0 +1,7 @@
+# .git-blame-ignore-revs
+# run pre-commit against all files
+376413493eca8298b8f2c17c8f02fc0d04285136
+# format all files with black
+1583d515c0a767e2b69b3cdcd785008d2dd0cd0b
+# format all files with isort
+73b2a811edc4f366516c3a63da73fc5e769761e7
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index d9873504..9b31cbf0 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -2,35 +2,41 @@
 on:
   push:
     branches:
       - master
+      - next
   pull_request:
     branches:
       - master
+      - next
 
 name: Run tests
 
 jobs:
   test:
-    name: nose tests
+    name: lint and test
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [ '3.6', '3.7' ]
+        python-version: ["3.11"]
     env:
-      HTTPOBS_BROKER_URL: fakebrokerurl
       HTTPOBS_DATABASE_HOST: fakehost
      HTTPOBS_DATABASE_PASS: foo
       HTTPOBS_DATABASE_USER: bar
     steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
+      - uses: actions/checkout@v4
+      - name: Install poetry
+        run: pipx install poetry
+      - uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Install dependencies
-        run: |
-          python -m pip install . -r requirements.txt
-      - name: Run flake8 tests
-        run: flake8 --config .flake8 httpobs
-      - name: Run nose tests
-        run: nosetests httpobs/tests -e insert_test_result -e scored_test -e select_test_results -e test_retrieve --with-coverage --cover-package=httpobs
+          cache: "poetry"
+      - run: poetry install
+
+      - uses: actions/cache@v3
+        with:
+          path: ~/.cache/pre-commit
+          key: pre-commit|${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Run pre-commit against all files
+        run: poetry run pre-commit run --all-files
+      - name: Run nose tests
+        run: poetry run nosetests httpobs/tests --with-coverage --cover-package=httpobs
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..67b0c942
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,22 @@
+default_language_version:
+  python: python3.11
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.5.0
+    hooks:
+      - id: trailing-whitespace
+      - id: end-of-file-fixer
+      - id: check-yaml
+      - id: check-added-large-files
+  - repo: https://github.com/pycqa/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.12.1
+    hooks:
+      - id: black
+  - repo: https://github.com/pycqa/flake8
+    rev: 6.1.0
+    hooks:
+      - id: flake8
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 498baa3f..041fbb69 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,8 +1,8 @@
 # Community Participation Guidelines
 
-This repository is governed by Mozilla's code of conduct and etiquette guidelines. 
+This repository is governed by Mozilla's code of conduct and etiquette guidelines.
 For more details, please read the
-[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 
+[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/).
 
 ## How to Report
 For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page.
diff --git a/httpobs/Dockerfile b/Dockerfile
similarity index 55%
rename from httpobs/Dockerfile
rename to Dockerfile
index 1b9e761c..49590014 100644
--- a/httpobs/Dockerfile
+++ b/Dockerfile
@@ -1,6 +1,6 @@
 # http-observatory
 
-FROM python:3.5
+FROM python:3.11
 MAINTAINER https://github.com/mozilla/http-observatory
 
 RUN groupadd --gid 1001 app && \
@@ -9,13 +9,13 @@
 RUN install -o app -g app -d /var/run/httpobs /var/log/httpobs
 
 WORKDIR /app
 
-COPY . httpobs
+COPY pyproject.toml poetry.lock .
+RUN pip install poetry && \
+    poetry config virtualenvs.create false && \
+    poetry install
 
-RUN pip install --upgrade --no-cache-dir \
-    -r httpobs/requirements.txt \
-    -r httpobs/database/requirements.txt \
-    -r httpobs/scanner/requirements.txt \
-    -r httpobs/website/requirements.txt
+COPY httpobs httpobs
+RUN poetry install --no-dev
 
 ENV PYTHONPATH $PYTHONPATH:/app
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index f5145347..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,8 +0,0 @@
-include LICENSE
-include MANIFEST.in
-include README.md
-include httpobs
-include httpobs
-recursive-include httpobs/conf *.conf *.json *.pem
-global-exclude __pycache__
-global-exclude *.py[co]
diff --git a/README.md b/README.md
index 18d578a8..d2e77252 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Mozilla HTTP Observatory - [![Build Status](https://travis-ci.org/april/http-observatory.svg?branch=master)](https://travis-ci.org/april/http-observatory) [![Requirements Status](https://requires.io/github/mozilla/http-observatory/requirements.svg?branch=master)](https://requires.io/github/mozilla/http-observatory/requirements/?branch=master)
+# Mozilla HTTP Observatory
 
 The Mozilla HTTP Observatory is a set of tools to analyze your website and inform you if you are utilizing the many available methods to secure it.
 
@@ -16,31 +16,54 @@ Sites can be scanned using:
 * [observatory-cli](https://github.com/mozilla/observatory-cli) - the official node.js command line interface
 * [java-http-observatory-api](https://github.com/stoennies/java-http-observatory-api) - a third party java library and command line interface
 
-## Contributing
+## Development
 
 ### Prerequisites
 
-* Python 3.7
+* Python 3.11
 * Git
-* pip3
+* pip
 
 #### Notes
 
-These instructions assume that you have a working Python3.7 development environment with `pip3` installed and capable of building requirements, which may require installing an additional python OS package (`-dev`, `-devel`).
+These instructions assume that you have a working Python 3.11 development environment with `pip` installed and capable of building requirements, which may require installing an additional Python OS package (`-dev`, `-devel`).
 
-If this is not appropriate for your environment, you may install the appropriate requirements using your OS package manager (or other means) and skip the `pip3 -r requirements` command. 
+```bash
+# Clone the code
+$ git clone https://github.com/mozilla/http-observatory.git
+$ cd http-observatory
+# Install poetry
+$ pip install poetry
+# Install the project dependencies and scripts
+$ poetry install
+# Activate the virtual environment
+$ poetry shell
+# Install the pre-commit hooks
+$ pre-commit install
+# copy and edit the config file
+$ cp httpobs/conf/httpobs.conf ~/.httpobs.conf
+$ nano ~/.httpobs.conf
+# start the dev server
+$ httpobs-server
+```
+
+### Running tests
+
+```bash
+$ nosetests httpobs/tests --with-coverage --cover-package=httpobs
+```
 
 ## Running a scan from the local codebase, without DB, for continuous integration
 
 ```bash
 # Install the HTTP Observatory
 $ git clone https://github.com/mozilla/http-observatory.git
 $ cd http-observatory
-$ pip3 install --upgrade .
-$ pip3 install --upgrade -r requirements.txt
+$ pip install poetry
+$ poetry install
 ```
 
-### Using the local scanner function calls
+### Using the scanner function calls
 
 ```python
->>> from httpobs.scanner.local import scan
+>>> from httpobs.scanner import scan
 >>> scan('observatory.mozilla.org')  # a scan with default options
 >>> scan('observatory.mozilla.org',  # all the custom options
          http_port=8080,             # http server runs on port 8080
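
The diff elides the rest of the `scan()` example and its return value. For orientation, a sketch of the result shape that `insert_test_results()` (further down in this diff) consumes — the keys come from that code; every value here is illustrative:

```python
# Sketch of the dict scan() is expected to hand to insert_test_results();
# keys mirror httpobs/database/database.py below, values are made up.
result = {
    'scan': {
        'algorithm_version': 4,  # illustrative
        'grade': 'B+',
        'score': 80,
        'likelihood_indicator': 'MEDIUM',
        'response_headers': {'Content-Type': 'text/html'},
        'status_code': 200,
        'tests_failed': 2,
        'tests_passed': 10,
        'tests_quantity': 12,
    },
    'tests': {
        'strict-transport-security': {  # one entry per test name
            'expectation': 'hsts-implemented-max-age-at-least-six-months',
            'pass': True,
            'result': 'hsts-implemented-max-age-at-least-six-months',
            'score_description': 'illustrative description',
            'score_modifier': 0,
        },
    },
}
```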
@@ -53,75 +76,11 @@ $ pip3 install --upgrade -r requirements.txt
 ### The same, but with the local CLI
 
 ```bash
+$ poetry shell
 $ httpobs-local-scan --http-port 8080 --https-port 8443 --path '/foo/bar' \
     --cookies '{"foo": "bar"}' --headers '{"X-Foo": "bar"}' --no-verify mozilla.org
 ```
 
-## Running a local scanner with Docker
-* Install [Docker Toolbox](https://docs.docker.com/toolbox/overview/) and [VirtualBox](https://www.virtualbox.org/wiki/Downloads)
-
-```bash
-# Install the HTTP Observatory client and requests library
-$ git clone https://github.com/mozilla/http-observatory.git
-$ cd http-observatory
-$ pip3 install .
-$ pip3 install --upgrade requests
-
-# Create docker machine
-$ docker-machine create --driver virtualbox --virtualbox-disk-size "40000" http-observatory
-
-# Save the URL to the API in your .profile, .bash_profile, or whatever
-$ echo export HTTPOBS_API_URL=http://$(docker-machine ip http-observatory):57001/api/v1 >> ~/.profile
-$ . ~/.profile
-
-# Start up the docker instance and install all the pieces
-$ eval $(docker-machine env http-observatory)
-$ docker-compose up -d
-```
-
-## Creating a local installation (tested on Ubuntu 15)
-```
-# Install git, postgresql, and redis
-# sudo -s
-# apt-get install -y git libpq-dev postgresql redis-server
-
-# Clone the repo
-# cd /opt
-# git clone https://github.com/mozilla/http-observatory.git
-# cd http-observatory
-
-# Install the observatory and scanner
-# pip install .
-# pip3 install -r requirements.txt
-
-# Install the database
-# su - postgres
-$ createdb http_observatory
-$ psql http_observatory < httpobs/database/schema.sql
-$ psql http_observatory
-http_observatory=# \password httpobsapi
-http_observatory=# \password httpobsscanner
-# vi /etc/postgresql/9.4/main/postgresql.conf (set max_connections = 512, shared_buffers = 256MB)
-# service postgresql restart
-
-# Create the httpobs user, and log/pid directories
-# useradd -m httpobs
-# install -m 750 -o httpobs -g httpobs -d /var/run/httpobs /var/log/httpobs
-
-# Update the environmental variables
-# su - httpobs
-$ echo export HTTPOBS_API_URL="http://localhost:57001/api/v1" >> ~/.profile
-
-# Start the scanner
-$ cd /opt/http-observatory
-$ HTTPOBS_DATABASE_USER="httpobsscanner" HTTPOBS_DATABASE_PASS="....." \
-  /opt/http-observatory/httpobs/scripts/httpobs-scan-worker
-
-# Start the API (in another terminal)
-# HTTPOBS_DATABASE_USER="httpobsapi" HTTPOBS_DATABASE_PASS="....." \
-  uwsgi --http :57001 --wsgi-file httpobs/website/main.py --processes 8 --callable app --master
-```
-
 ## Authors
 
 * April King
diff --git a/docker-compose.yml b/docker-compose.yml
index be3c214c..e1956520 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,12 +1,11 @@
 version: '2'
 services:
   website:
-    build: ./httpobs
-    command: uwsgi --http :57001 --wsgi-file /app/httpobs/website/main.py --processes 8 --callable app --master
+    build: .
+    command: uwsgi --http :57001 --wsgi-file /app/httpobs/website/main.py --processes 1 --callable app --master
     depends_on:
       - postgres
     environment:
-      - HTTPOBS_BROKER_URL=redis://redis:6379/0
       - HTTPOBS_DATABASE_HOST=postgres
       - HTTPOBS_DATABASE_PASS=httpobsapipassword
       - HTTPOBS_DATABASE_USER=httpobsapi
@@ -14,24 +13,7 @@
     links:
       - postgres
     ports:
       - "57001:57001"
-    restart: always
-
-  # celery task for scanner
-  scanner:
-    build: ./httpobs
-    command: /app/httpobs/scripts/httpobs-scan-worker
-    depends_on:
-      - postgres
-      - redis
-    environment:
-      - HTTPOBS_BROKER_URL=redis://redis:6379/0
-      - HTTPOBS_DATABASE_HOST=postgres
-      - HTTPOBS_DATABASE_PASS=httpobsscannerpassword
-      - HTTPOBS_DATABASE_USER=httpobsscanner
-      - HTTPOBS_MAX_CONCURRENCY=16
-    links:
-      - postgres
-      - redis
+    restart: unless-stopped
 
   postgres:
     build: ./httpobs/database
@@ -39,6 +21,3 @@
     environment:
       - POSTGRES_USER=httpobs
       - POSTGRES_PASSWORD=totallyfakepassword
       - POSTGRES_DB=http_observatory
-
-  redis:
-    image: redis
diff --git a/httpobs/__init__.py b/httpobs/__init__.py
index 3fdbf744..0344c530 100644
--- a/httpobs/__init__.py
+++ b/httpobs/__init__.py
@@ -1,2 +1,12 @@
 SOURCE_URL = 'https://github.com/mozilla/http-observatory'
 VERSION = '0.9.3'
+
+# The various statuses
+STATE_ABORTED = 'ABORTED'
+STATE_FAILED = 'FAILED'
+STATE_FINISHED = 'FINISHED'
+STATE_PENDING = 'PENDING'
+STATE_STARTING = 'STARTING'
+STATE_RUNNING = 'RUNNING'
+
+STATES = (STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_PENDING, STATE_RUNNING, STATE_STARTING)
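
The state constants move from `httpobs.scanner` up to the top-level `httpobs` package. A couple of hypothetical helpers showing how downstream code can use them (these are not part of the diff):

```python
from httpobs import STATES, STATE_ABORTED, STATE_FAILED, STATE_FINISHED

def is_valid_state(state: str) -> bool:
    # STATES is the tuple defined in httpobs/__init__.py above
    return state in STATES

def is_terminal(state: str) -> bool:
    # Hypothetical helper: ABORTED, FAILED, and FINISHED never transition
    # again; periodic_maintenance() later in this diff treats them that way.
    return state in (STATE_ABORTED, STATE_FAILED, STATE_FINISHED)
```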
diff --git a/httpobs/conf/__init__.py b/httpobs/conf/__init__.py
index 72a183df..40d03c1f 100644
--- a/httpobs/conf/__init__.py
+++ b/httpobs/conf/__init__.py
@@ -1,15 +1,13 @@
-from os import environ, cpu_count
-
 import configparser
 import os.path
 import sys
-
+from os import environ
 
 # Read in the default config file if /etc/httpobs.conf doesn't already exist
 __dirname = os.path.abspath(os.path.dirname(__file__))
 
 _config_parser = configparser.ConfigParser()
-_config_parser.read_file(open(os.path.join(__dirname, 'httpobs.conf')))    # default values
-_config_parser.read(['/etc/httpobs.conf', os.path.expanduser('~/.httpobs.conf')])   # overridden values
+_config_parser.read_file(open(os.path.join(__dirname, 'httpobs.conf')))  # default values
+_config_parser.read(['/etc/httpobs.conf', os.path.expanduser('~/.httpobs.conf')])  # overridden values
 
 
 # Return None if it's not in the config parser
@@ -38,18 +36,19 @@
 DEVELOPMENT_MODE = True if environ.get('HTTPOBS_DEV') == 'yes' else False or __conf('global', 'development', bool)
 
 # API configuration
-API_ALLOW_VERBOSE_STATS_FROM_PUBLIC = (environ.get('HTTPOBS_ALLOW_VERBOSE_STATS_FROM_PUBLIC') == 'yes' or
-                                       __conf('api', 'allow_verbose_stats_from_public', bool, True))
+API_ALLOW_VERBOSE_STATS_FROM_PUBLIC = environ.get('HTTPOBS_ALLOW_VERBOSE_STATS_FROM_PUBLIC') == 'yes' or __conf(
+    'api', 'allow_verbose_stats_from_public', bool, True
+)
 API_CACHED_RESULT_TIME = int(environ.get('HTTPOBS_API_CACHED_RESULT_TIME') or __conf('api', 'cached_result_time'))
 API_COOLDOWN = int(environ.get('HTTPOBS_API_COOLDOWN') or __conf('api', 'cooldown', int))
 API_PORT = int(environ.get('HTTPOBS_API_PORT') or __conf('api', 'port', int))
-API_PROPAGATE_EXCEPTIONS = (True if environ.get('HTTPOBS_PROPAGATE_EXCEPTIONS') == 'yes' else False or
-                            __conf('api', 'propagate_exceptions', bool))
+API_PROPAGATE_EXCEPTIONS = (
+    True
+    if environ.get('HTTPOBS_PROPAGATE_EXCEPTIONS') == 'yes'
+    else False or __conf('api', 'propagate_exceptions', bool)
+)
 API_URL = environ.get('HTTPOBS_API_URL') or __conf('api', 'url')
 
-# Broker configuration
-BROKER_URL = (environ.get('HTTPOBS_BROKER_URL') or __conf('scanner', 'broker'))
-
 # Database configuration
 DATABASE_DB = environ.get('HTTPOBS_DATABASE_DB') or __conf('database', 'database')
 DATABASE_HOST = environ.get('HTTPOBS_DATABASE_HOST') or __conf('database', 'host')
@@ -66,38 +65,20 @@
 DATABASE_SSL_MODE = 'prefer'
 
 # Retriever parameters
-RETRIEVER_CONNECT_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_CONNECT_TIMEOUT') or
-                                  __conf('retriever', 'connect_timeout'))
-RETRIEVER_READ_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_READ_TIMEOUT') or
-                               __conf('retriever', 'read_timeout'))
+RETRIEVER_CONNECT_TIMEOUT = float(
+    environ.get('HTTPOBS_RETRIEVER_CONNECT_TIMEOUT') or __conf('retriever', 'connect_timeout')
+)
+RETRIEVER_READ_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_READ_TIMEOUT') or __conf('retriever', 'read_timeout'))
 RETRIEVER_USER_AGENT = environ.get('HTTPOBS_RETRIEVER_USER_AGENT') or __conf('retriever', 'user_agent')
 RETRIEVER_CORS_ORIGIN = environ.get('HTTPOBS_RETRIEVER_CORS_ORIGIN') or __conf('retriever', 'cors_origin')
 
 # Scanner configuration
-SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or
-                              __conf('scanner', 'abort_scan_time'))
-SCANNER_ALLOW_KICKSTART = (environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART') == 'yes' or
-                           __conf('scanner', 'allow_kickstart', bool))
-SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int(environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or
-                                          __conf('scanner', 'allow_kickstart_num_aborted'))
-SCANNER_ALLOW_LOCALHOST = (environ.get('HTTPOBS_SCANNER_ALLOW_LOCALHOST') == 'yes' or
-                           __conf('scanner', 'allow_localhost', bool))
-SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or
-                                               __conf('scanner', 'broker_reconnection_sleep_time'))
-SCANNER_CYCLE_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_CYCLE_SLEEP_TIME') or
-                                 __conf('scanner', 'cycle_sleep_time'))
-SCANNER_DATABASE_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_DATABASE_RECONNECTION_SLEEP_TIME') or
-                                                 __conf('scanner', 'database_reconnection_sleep_time'))
-SCANNER_MAINTENANCE_CYCLE_FREQUENCY = int(environ.get('HTTPOBS_MAINTENANCE_CYCLE_FREQUENCY') or
-                                          __conf('scanner', 'maintenance_cycle_frequency'))
-SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY = int(environ.get('HTTPOBS_SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY') or
-                                                  __conf('scanner', 'materialized_view_refresh_frequency'))
-SCANNER_MAX_CPU_UTILIZATION = int(environ.get('HTTPOBS_SCANNER_MAX_CPU_UTILIZATION') or
-                                  __conf('scanner', 'max_cpu_utilization'))
-SCANNER_MAX_LOAD_RATIO = int(environ.get('HTTPOBS_SCANNER_MAX_LOAD_RATIO_PER_CPU') or
-                             __conf('scanner', 'max_load_ratio_per_cpu'))
-SCANNER_MAX_LOAD = cpu_count() * SCANNER_MAX_LOAD_RATIO
-SCANNER_MOZILLA_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or
-                                                         __conf('scanner', 'mozilla_domains')).split(',')]
-SCANNER_PINNED_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_PINNED_DOMAINS') or
-                                                        __conf('scanner', 'pinned_domains')).split(',')]
+SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or __conf('scanner', 'abort_scan_time'))
+SCANNER_MOZILLA_DOMAINS = [
+    domain.strip()
+    for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or __conf('scanner', 'mozilla_domains')).split(',')
+]
+SCANNER_PINNED_DOMAINS = [
+    domain.strip()
+    for domain in (environ.get('HTTPOBS_SCANNER_PINNED_DOMAINS') or __conf('scanner', 'pinned_domains')).split(',')
+]
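
Every setting in this module follows the same precedence: an `HTTPOBS_*` environment variable wins over the config files, and later config files override earlier ones. A minimal, self-contained sketch of the pattern (the file names here are placeholders):

```python
import configparser
from os import environ

parser = configparser.ConfigParser()
parser.read_string("[api]\nport = 57001\n")  # bundled defaults ship with the package
parser.read(['/etc/httpobs.conf', 'local.httpobs.conf'])  # later files override, if present

# the environment variable beats both config files
API_PORT = int(environ.get('HTTPOBS_API_PORT') or parser.get('api', 'port'))
```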
diff --git a/httpobs/conf/hsts-preload.json b/httpobs/conf/hsts-preload.json
index 0bb6d2e3..1c4e3ed8 100644
--- a/httpobs/conf/hsts-preload.json
+++ b/httpobs/conf/hsts-preload.json
@@ -1066793,4 +1066793,4 @@
     "mode": "force-https",
     "pinned": false
   }
-}
\ No newline at end of file
+}
diff --git a/httpobs/conf/httpobs.conf b/httpobs/conf/httpobs.conf
index b80654cf..a4bb3d9b 100644
--- a/httpobs/conf/httpobs.conf
+++ b/httpobs/conf/httpobs.conf
@@ -25,16 +25,5 @@
 user_agent = Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:98.0) Gecko/20100101 Firefox/98.0
 
 [scanner]
 abort_scan_time = 1800
-allow_kickstart = no
-allow_kickstart_num_aborted = 5
-allow_localhost = no
-broker = redis://localhost:6379/0
-broker_reconnection_sleep_time = 15
-cycle_sleep_time = .5
-database_reconnection_sleep_time = 5
-maintenance_cycle_frequency = 900
-materialized_view_refresh_frequency = 3600
-max_cpu_utilization = 90
-max_load_ratio_per_cpu = 3
 mozilla_domains = mozilla,allizom,browserid,firefox,persona,taskcluster,webmaker
 pinned_domains = accounts.firefox.com,addons.mozilla.org,aus4.mozilla.org,aus5.mozilla.org,cdn.mozilla.org,services.mozilla.com
diff --git a/httpobs/database/.dockerignore b/httpobs/database/.dockerignore
index 347179a0..0ebe5ca6 100644
--- a/httpobs/database/.dockerignore
+++ b/httpobs/database/.dockerignore
@@ -1,3 +1,3 @@
 __pycache__
 data
-.DS_Store
\ No newline at end of file
+.DS_Store
diff --git a/httpobs/database/Dockerfile b/httpobs/database/Dockerfile
index d65135be..d8866402 100644
--- a/httpobs/database/Dockerfile
+++ b/httpobs/database/Dockerfile
@@ -2,4 +2,4 @@
 FROM postgres
 
 ADD schema.sql /docker-entrypoint-initdb.d/
-ADD schema.sql.docker.sql /docker-entrypoint-initdb.d/
\ No newline at end of file
+ADD schema.sql.docker.sql /docker-entrypoint-initdb.d/
diff --git a/httpobs/database/__init__.py b/httpobs/database/__init__.py
index 76f8ea46..cffca495 100644
--- a/httpobs/database/__init__.py
+++ b/httpobs/database/__init__.py
@@ -1,25 +1,23 @@
-from .database import (get_cursor,
-                       insert_scan,
-                       insert_scan_grade,
-                       insert_test_results,
-                       periodic_maintenance,
-                       refresh_materialized_views,
-                       select_scan_host_history,
-                       select_scan_recent_finished_scans,
-                       select_scan_recent_scan,
-                       select_scan_scanner_statistics,
-                       select_site_headers,
-                       select_site_id,
-                       select_star_from,
-                       select_test_results,
-                       update_scan_state,
-                       update_scans_dequeue_scans)
+from .database import (
+    get_cursor,
+    insert_scan,
+    insert_test_results,
+    periodic_maintenance,
+    refresh_materialized_views,
+    select_scan_host_history,
+    select_scan_recent_finished_scans,
+    select_scan_recent_scan,
+    select_scan_scanner_statistics,
+    select_site_headers,
+    select_site_id,
+    select_star_from,
+    select_test_results,
+    update_scan_state,
+)
 
 __all__ = [
-    'abort_broken_scans',
     'get_cursor',
     'insert_scan',
-    'insert_scan_grade',
     'insert_test_results',
     'refresh_materialized_views',
     'select_scan_host_history',
@@ -32,5 +30,4 @@
     'select_test_results',
     'update_scan_state',
     'periodic_maintenance',
-    'update_scans_dequeue_scans',
 ]
diff --git a/httpobs/database/celeryconfig.py b/httpobs/database/celeryconfig.py
deleted file mode 100644
index d58b2a2a..00000000
--- a/httpobs/database/celeryconfig.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from httpobs.conf import BROKER_URL
-
-
-# Set the Celery task queue
-BROKER_URL = BROKER_URL
-
-CELERY_ACCEPT_CONTENT = ['json']
-CELERY_IGNORE_RESULTS = True
-CELERY_REDIRECT_STDOUTS_LEVEL = 'WARNING'
-CELERY_RESULT_SERIALIZER = 'json'
-CELERY_TASK_SERIALIZER = 'json'
-
-CELERYD_TASK_SOFT_TIME_LIMIT = 300
-CELERYD_TASK_TIME_LIMIT = 600
-
-CELERYBEAT_SCHEDULE = {
-    'abort-broken-scans': {
-        'task': 'httpobs.database.tasks.abort_broken_scans',
-        'schedule': 1800,
-    }
-}
diff --git a/httpobs/database/database.py b/httpobs/database/database.py
index 53be2c73..d9f0a65b 100644
--- a/httpobs/database/database.py
+++ b/httpobs/database/database.py
@@ -1,30 +1,25 @@
+import sys
 from contextlib import contextmanager
 from json import dumps
-from types import SimpleNamespace
 from os import getpid
-
-from httpobs.conf import (API_CACHED_RESULT_TIME,
-                          DATABASE_CA_CERT,
-                          DATABASE_DB,
-                          DATABASE_HOST,
-                          DATABASE_PASSWORD,
-                          DATABASE_PORT,
-                          DATABASE_SSL_MODE,
-                          DATABASE_USER,
-                          SCANNER_ABORT_SCAN_TIME)
-from httpobs.scanner import (ALGORITHM_VERSION,
-                             STATE_ABORTED,
-                             STATE_FAILED,
-                             STATE_FINISHED,
-                             STATE_PENDING,
-                             STATE_STARTING)
-from httpobs.scanner.analyzer import NUM_TESTS
-from httpobs.scanner.grader import get_grade_and_likelihood_for_score, MINIMUM_SCORE_FOR_EXTRA_CREDIT
+from types import SimpleNamespace
 
 import psycopg2
 import psycopg2.extras
 import psycopg2.pool
-import sys
+
+from httpobs import STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_RUNNING
+from httpobs.conf import (
+    API_CACHED_RESULT_TIME,
+    DATABASE_CA_CERT,
+    DATABASE_DB,
+    DATABASE_HOST,
+    DATABASE_PASSWORD,
+    DATABASE_PORT,
+    DATABASE_SSL_MODE,
+    DATABASE_USER,
+    SCANNER_ABORT_SCAN_TIME,
+)
 
 
 class SimpleDatabaseConnection:
@@ -35,13 +30,15 @@ def __init__(self):
 
     def _connect(self):
         try:
-            self._conn = psycopg2.connect(database=DATABASE_DB,
-                                          host=DATABASE_HOST,
-                                          password=DATABASE_PASSWORD,
-                                          port=DATABASE_PORT,
-                                          sslmode=DATABASE_SSL_MODE,
-                                          sslrootcert=DATABASE_CA_CERT,
-                                          user=DATABASE_USER)
+            self._conn = psycopg2.connect(
+                database=DATABASE_DB,
+                host=DATABASE_HOST,
+                password=DATABASE_PASSWORD,
+                port=DATABASE_PORT,
+                sslmode=DATABASE_SSL_MODE,
+                sslrootcert=DATABASE_CA_CERT,
+                user=DATABASE_USER,
+            )
 
             if not self._connected:
                 print('INFO: Connected to PostgreSQL', file=sys.stderr)
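
`get_cursor()` itself is unchanged and therefore elided from this diff; every function below uses it the same way. The usage pattern, with an illustrative query:

```python
from httpobs.database import get_cursor

# get_cursor() is a context manager yielding a cursor whose rows behave like
# dicts; queries use psycopg2 %s placeholders throughout this file.
with get_cursor() as cur:
    cur.execute("SELECT id FROM sites WHERE domain = %s", ('mozilla.org',))
    if cur.rowcount > 0:
        site_id = cur.fetchone()['id']
```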
@@ -99,74 +96,65 @@ def get_cursor():
 
 def insert_scan(site_id: int, hidden: bool = False) -> dict:
     with get_cursor() as cur:
-        cur.execute("""INSERT INTO scans (site_id, state, start_time, algorithm_version, tests_quantity, hidden)
-                         VALUES (%s, %s, NOW(), %s, %s, %s)
-                         RETURNING *""",
-                    (site_id, STATE_PENDING, ALGORITHM_VERSION, NUM_TESTS, hidden))
-
-        return dict(cur.fetchone())
-
-
-def insert_scan_grade(scan_id, scan_grade, scan_score) -> dict:
-    with get_cursor() as cur:
-        cur.execute("""UPDATE scans
-                         SET (grade, score) =
-                         (%s, %s)
-                         WHERE id = %s
+        cur.execute(
+            """INSERT INTO scans (site_id, state, start_time, tests_quantity, hidden)
+                         VALUES (%s, %s, NOW(), 0, %s)
                          RETURNING *""",
-                    (scan_grade, scan_score, scan_id))
+            (site_id, STATE_RUNNING, hidden),
+        )
 
         return dict(cur.fetchone())
 
 
-# TODO: Separate out some of this logic so it doesn't need to be duplicated in local.scan()
-def insert_test_results(site_id: int,
-                        scan_id: int,
-                        tests: list,
-                        response_headers: dict,
-                        status_code: int = None) -> dict:
+def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict:
     with get_cursor() as cur:
-        tests_failed = tests_passed = 0
-        score_with_extra_credit = uncurved_score = 100
-
-        for test in tests:
-            name = test.pop('name')
+        for name, test in data["tests"].items():
             expectation = test.pop('expectation')
             passed = test.pop('pass')
             result = test.pop('result')
             score_modifier = test.pop('score_modifier')
-
-            # Keep track of how many tests passed or failed
-            if passed:
-                tests_passed += 1
-            else:
-                tests_failed += 1
-
-            # And keep track of the score
-            score_with_extra_credit += score_modifier
-            if score_modifier < 0:
-                uncurved_score += score_modifier
+            del test["score_description"]
 
             # Insert test result to the database
-            cur.execute("""INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier)
+            cur.execute(
+                """INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier)
                              VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""",
-                        (site_id, scan_id, name, expectation, result, passed, dumps(test), score_modifier))
-
-        # Only record the full score if the uncurved score already receives an A
-        score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score
-
-        # Now we need to update the scans table
-        score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score)
+                (site_id, scan_id, name, expectation, result, passed, dumps(test), score_modifier),
+            )
+
+        scan = data["scan"]
+        algorithm_version = scan["algorithm_version"]
+        tests_failed = scan["tests_failed"]
+        tests_passed = scan["tests_passed"]
+        tests_quantity = scan["tests_quantity"]
+        grade = scan["grade"]
+        score = scan["score"]
+        likelihood_indicator = scan["likelihood_indicator"]
+        response_headers = scan["response_headers"]
+        status_code = scan["status_code"]
 
         # Update the scans table
-        cur.execute("""UPDATE scans
+        cur.execute(
+            """UPDATE scans
                          SET (end_time, tests_failed, tests_passed, grade, score, likelihood_indicator,
-                         state, response_headers, status_code) =
-                         (NOW(), %s, %s, %s, %s, %s, %s, %s, %s)
+                         state, response_headers, status_code, algorithm_version, tests_quantity) =
+                         (NOW(), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                          WHERE id = %s
                          RETURNING *""",
-                    (tests_failed, tests_passed, grade, score, likelihood_indicator, STATE_FINISHED,
-                     dumps(response_headers), status_code, scan_id))
+            (
+                tests_failed,
+                tests_passed,
+                grade,
+                score,
+                likelihood_indicator,
+                STATE_FINISHED,
+                dumps(response_headers),
+                status_code,
+                algorithm_version,
+                tests_quantity,
+                scan_id,
+            ),
+        )
 
         row = dict(cur.fetchone())
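
`insert_test_results()` now takes the whole result dict rather than separate arguments. A hypothetical sketch of how the pieces fit together (the actual call site is not part of this diff):

```python
from httpobs.database import insert_scan, insert_test_results, select_site_id
from httpobs.scanner import scan

site_id = select_site_id('mozilla.org')  # get-or-create the site row
scan_row = insert_scan(site_id)          # scan_row['id'] is the new scan id
result = scan('mozilla.org')             # dict with 'scan' and 'tests' keys
scan_row = insert_test_results(site_id, scan_row['id'], result)
```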
@@ -181,13 +169,15 @@ def periodic_maintenance() -> int:
     """
     with get_cursor() as cur:
         # Mark all scans that have been sitting unfinished for at least SCANNER_ABORT_SCAN_TIME as ABORTED
-        cur.execute("""UPDATE scans
+        cur.execute(
+            """UPDATE scans
                          SET (state, end_time) = (%s, NOW())
                          WHERE state != %s
                            AND state != %s
                            AND state != %s
                            AND start_time < NOW() - INTERVAL '%s seconds';""",
-                    (STATE_ABORTED, STATE_ABORTED, STATE_FAILED, STATE_FINISHED, SCANNER_ABORT_SCAN_TIME))
+            (STATE_ABORTED, STATE_ABORTED, STATE_FAILED, STATE_FINISHED, SCANNER_ABORT_SCAN_TIME),
+        )
 
         return cur.rowcount
@@ -220,21 +210,25 @@ def select_star_from(table: str) -> dict:
 
 def select_scan_host_history(site_id: int) -> list:
     # Get all of the site's historic scans
     with get_cursor() as cur:
-        cur.execute("""SELECT id, grade, score, end_time FROM scans
+        cur.execute(
+            """SELECT id, grade, score, end_time FROM scans
                          WHERE site_id = %s
                          AND state = %s
                          ORDER BY end_time ASC;""",
-                    (site_id, STATE_FINISHED))
+            (site_id, STATE_FINISHED),
+        )
 
     if cur.rowcount > 0:
-        return([
+        return [
             {
                 'scan_id': row['id'],
                 'grade': row['grade'],
                 'score': row['score'],
                 'end_time': row['end_time'],
-                'end_time_unix_timestamp': int(row['end_time'].timestamp())
-            } for row in cur.fetchall()])
+                'end_time_unix_timestamp': int(row['end_time'].timestamp()),
+            }
+            for row in cur.fetchall()
+        ]
     else:
         return []
@@ -265,13 +259,15 @@ def select_scan_scanner_statistics(verbose: bool = False) -> dict:
         states = dict(cur.fetchall())
 
         # Get the recent scan count
-        cur.execute("""SELECT DATE_TRUNC('hour', end_time) AS hour, COUNT(*) as num_scans
+        cur.execute(
+            """SELECT DATE_TRUNC('hour', end_time) AS hour, COUNT(*) as num_scans
                          FROM scans
                          WHERE (end_time < DATE_TRUNC('hour', NOW()))
                            AND (end_time >= DATE_TRUNC('hour', NOW()) - INTERVAL '24 hours')
                          GROUP BY hour
                          ORDER BY hour DESC;""",
-                    (STATE_FINISHED,))
+            (STATE_FINISHED,),
+        )
 
         recent_scans = dict(cur.fetchall()).items()
     else:
         recent_scans = {}
@@ -292,7 +288,8 @@ def select_scan_recent_finished_scans(num_scans=10, min_score=0, max_score=100)
     # Used for /api/v1/getRecentScans
     # Fix from: https://gist.github.com/april/61efa9ff197828bf5ab13e5a00be9138
     with get_cursor() as cur:
-        cur.execute("""SELECT sites.domain, s2.grade
+        cur.execute(
+            """SELECT sites.domain, s2.grade
                          FROM
                            (SELECT DISTINCT ON (s1.site_id) s1.site_id, s1.grade, s1.end_time
                             FROM
@@ -307,19 +304,22 @@
                             ORDER BY s1.site_id, s1.end_time DESC) s2
                          INNER JOIN sites ON (sites.id = s2.site_id)
                          ORDER BY s2.end_time DESC LIMIT %s;""",
-                    (STATE_FINISHED, min_score, max_score, num_scans * 2, num_scans))
+            (STATE_FINISHED, min_score, max_score, num_scans * 2, num_scans),
+        )
 
     return dict(cur.fetchall())
 
 
 def select_scan_recent_scan(site_id: int, recent_in_seconds=API_CACHED_RESULT_TIME) -> dict:
     with get_cursor() as cur:
-        cur.execute("""SELECT * FROM scans
+        cur.execute(
+            """SELECT * FROM scans
                          WHERE site_id = %s
                          AND start_time >= NOW() - INTERVAL '%s seconds'
                          ORDER BY start_time DESC
                          LIMIT 1""",
-                    (site_id, recent_in_seconds))
+            (site_id, recent_in_seconds),
+        )
 
     if cur.rowcount > 0:
         return dict(cur.fetchone())
@@ -330,11 +330,13 @@ def select_scan_recent_scan(site_id: int, recent_in_seconds=API_CACHED_RESULT_TI
 def select_site_headers(hostname: str) -> dict:
     # Return the site's headers
     with get_cursor() as cur:
-        cur.execute("""SELECT public_headers, private_headers, cookies FROM sites
+        cur.execute(
+            """SELECT public_headers, private_headers, cookies FROM sites
                          WHERE domain = %s
                          ORDER BY creation_time DESC
                          LIMIT 1""",
-                    (hostname,))
+            (hostname,),
+        )
 
         # If it has headers, merge the public and private headers together
         if cur.rowcount > 0:
@@ -344,10 +346,7 @@
             private_headers = {} if row.get('private_headers') is None else row.get('private_headers')
             headers.update(private_headers)
 
-            return {
-                'cookies': {} if row.get('cookies') is None else row.get('cookies'),
-                'headers': headers
-            }
+            return {'cookies': {} if row.get('cookies') is None else row.get('cookies'), 'headers': headers}
         else:
             return {}
@@ -355,20 +354,25 @@
 def select_site_id(hostname: str) -> int:
     # See if the site exists already
     with get_cursor() as cur:
-        cur.execute("""SELECT id FROM sites
+        cur.execute(
+            """SELECT id FROM sites
                          WHERE domain = %s
                          ORDER BY creation_time DESC
                          LIMIT 1""",
-                    (hostname,))
+            (hostname,),
+        )
 
         if cur.rowcount > 0:
             return cur.fetchone()['id']
 
     # If not, let's create the site
     with get_cursor() as cur:
-        cur.execute("""INSERT INTO sites (domain, creation_time)
+        cur.execute(
+            """INSERT INTO sites (domain, creation_time)
                          VALUES (%s, NOW())
-                         RETURNING id""", (hostname,))
+                         RETURNING id""",
+            (hostname,),
+        )
 
         return cur.fetchone()['id']
@@ -390,40 +394,26 @@
 def update_scan_state(scan_id, state: str, error=None) -> dict:
     if error:
         with get_cursor() as cur:
-            cur.execute("""UPDATE scans
+            cur.execute(
+                """UPDATE scans
                              SET (state, end_time, error) = (%s, NOW(), %s)
                              WHERE id = %s
                              RETURNING *""",
-                        (state, error, scan_id))
+                (state, error, scan_id),
+            )
 
             row = dict(cur.fetchone())
 
     else:
         with get_cursor() as cur:
-            cur.execute("""UPDATE scans
+            cur.execute(
+                """UPDATE scans
                              SET state = %s
                              WHERE id = %s
                              RETURNING *""",
-                        (state, scan_id))
+                (state, scan_id),
+            )
 
             row = dict(cur.fetchone())
 
     return row
-
-
-def update_scans_dequeue_scans(num_to_dequeue: int = 0) -> dict:
-    with get_cursor() as cur:
-        cur.execute("""UPDATE scans
-                         SET state = %s
-                         FROM (
-                           SELECT sites.domain, scans.site_id, scans.id AS scan_id, scans.state
-                           FROM scans
-                           INNER JOIN sites ON scans.site_id = sites.id
-                           WHERE state = %s
-                           LIMIT %s
-                           FOR UPDATE) sub
-                         WHERE scans.id = sub.scan_id
-                         RETURNING sub.domain, sub.site_id, sub.scan_id""",
-                    (STATE_STARTING, STATE_PENDING, num_to_dequeue))
-
-        return cur.fetchall()
diff --git a/httpobs/database/requirements.txt b/httpobs/database/requirements.txt
deleted file mode 100644
index 5641d61b..00000000
--- a/httpobs/database/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-psycopg2>=2.7,<2.8 --no-binary psycopg2  # By constraining psycopg2 to less than 2.8 we can't use Python 3.8 due to https://github.com/psycopg/psycopg2/issues/854#issuecomment-611791946
-redis==2.10.6
diff --git a/httpobs/database/schema.sql b/httpobs/database/schema.sql
index b55d3022..7dd27502 100644
--- a/httpobs/database/schema.sql
+++ b/httpobs/database/schema.sql
@@ -61,13 +61,6 @@
 CREATE INDEX tests_name_idx ON tests (name);
 CREATE INDEX tests_result_idx ON tests (result);
 CREATE INDEX tests_pass_idx ON tests (pass);
 
-CREATE USER httpobsscanner;
-GRANT SELECT on sites, scans, expectations, tests TO httpobsscanner;
-GRANT UPDATE (domain) ON sites to httpobsscanner; /* TODO: there's got to be a better way with SELECT ... FOR UPDATE */
-GRANT UPDATE on scans TO httpobsscanner;
-GRANT INSERT on tests TO httpobsscanner;
-GRANT USAGE ON SEQUENCE tests_id_seq TO httpobsscanner;
-
 CREATE USER httpobsapi;
 GRANT SELECT ON expectations, scans, tests to httpobsapi;
 GRANT SELECT (id, domain, creation_time, public_headers) ON sites TO httpobsapi;
@@ -78,6 +71,12 @@
 GRANT USAGE ON SEQUENCE sites_id_seq TO httpobsapi;
 GRANT USAGE ON SEQUENCE scans_id_seq TO httpobsapi;
 GRANT USAGE ON SEQUENCE expectations_id_seq TO httpobsapi;
 
+GRANT SELECT on sites, scans, expectations, tests TO httpobsapi;
+GRANT UPDATE (domain) ON sites to httpobsapi; /* TODO: there's got to be a better way with SELECT ... FOR UPDATE */
+GRANT UPDATE on scans TO httpobsapi;
+GRANT INSERT on tests TO httpobsapi;
+GRANT USAGE ON SEQUENCE tests_id_seq TO httpobsapi;
+
 CREATE INDEX scans_site_id_finished_state_end_time_idx ON scans (site_id, state, end_time DESC) WHERE state = 'FINISHED';
 
 CREATE MATERIALIZED VIEW latest_scans
@@ -129,7 +128,7 @@
 ALTER TABLE scans ADD COLUMN likelihood_indicator VARCHAR NULL;
 
 /* Update to frequently refresh latest_scans */
 /*
 GRANT SELECT ON latest_scans TO httpobsapi;
-ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsscanner;
+ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsapi;
 */
 
 /* Update to add earliest scans and a way to compare earliest and latest */
@@ -163,16 +162,16 @@
 CREATE UNIQUE INDEX scan_score_difference_distribution_summation_difference_idx ON scan_score_difference_distribution_summation (difference);
 COMMENT ON MATERIALIZED VIEW scan_score_difference_distribution_summation IS 'How many sites have improved by how many points';
 
 GRANT SELECT ON scan_score_difference_distribution_summation TO httpobsapi;
 
-ALTER MATERIALIZED VIEW grade_distribution OWNER TO httpobsscanner; /* so it can refresh */
-ALTER MATERIALIZED VIEW grade_distribution_all_scans OWNER TO httpobsscanner; /* so it can refresh */
-ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsscanner;
-ALTER MATERIALIZED VIEW earliest_scans OWNER TO httpobsscanner;
-ALTER MATERIALIZED VIEW scan_score_difference_distribution OWNER TO httpobsscanner;
-ALTER MATERIALIZED VIEW scan_score_difference_distribution_summation OWNER TO httpobsscanner;
-ALTER MATERIALIZED VIEW latest_tests OWNER TO httpobsscanner;
+ALTER MATERIALIZED VIEW grade_distribution OWNER TO httpobsapi; /* so it can refresh */
+ALTER MATERIALIZED VIEW grade_distribution_all_scans OWNER TO httpobsapi; /* so it can refresh */
+ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsapi;
+ALTER MATERIALIZED VIEW earliest_scans OWNER TO httpobsapi;
+ALTER MATERIALIZED VIEW scan_score_difference_distribution OWNER TO httpobsapi;
+ALTER MATERIALIZED VIEW scan_score_difference_distribution_summation OWNER TO httpobsapi;
+ALTER MATERIALIZED VIEW latest_tests OWNER TO httpobsapi;
 
 /* Database updates to allow us to track changes in scoring over time */
 /*
 ALTER TABLE scans ADD COLUMN algorithm_version SMALLINT NOT NULL DEFAULT 1;
 CREATE INDEX scans_algorithm_version_idx ON scans (algorithm_version);
-*/
\ No newline at end of file
+*/
diff --git a/httpobs/database/schema.sql.docker.sql b/httpobs/database/schema.sql.docker.sql
index f6bbbd45..dd7fc924 100644
--- a/httpobs/database/schema.sql.docker.sql
+++ b/httpobs/database/schema.sql.docker.sql
@@ -1,4 +1,3 @@
 /* silly alphabetical naming requirements */
 ALTER ROLE httpobsapi LOGIN PASSWORD 'httpobsapipassword';
-ALTER ROLE httpobsscanner LOGIN PASSWORD 'httpobsscannerpassword';
\ No newline at end of file
"https://observatory.mozilla.org/", - "stage": "https://mozilla.github.io/http-observatory-website/", - "dev": "https://mozilla.github.io/http-observatory-website/" + "stage": "https://observatory.allizom.org/", + "dev": "https://observatory.allizom.org/" }, "keywords": [ "python", diff --git a/httpobs/requirements.txt b/httpobs/requirements.txt deleted file mode 100644 index 7f2f3340..00000000 --- a/httpobs/requirements.txt +++ /dev/null @@ -1,19 +0,0 @@ -amqp==2.3.2 -beautifulsoup4==4.6.3 -billiard==3.5.0.4 -celery==4.2.1 -click==7.0 -coverage==4.5.2 -flake8==3.6.0 -httpobs-cli==1.0.2 -itsdangerous==1.1.0 -kombu==4.2.1 -MarkupSafe==1.1.0 -mccabe==0.6.1 -nose==1.3.7 -pep8==1.7.1 -pycodestyle==2.4.0 -pyflakes==2.0.0 -pytz==2018.7 -vine==1.1.4 -Werkzeug==0.14.1 diff --git a/httpobs/scanner/__init__.py b/httpobs/scanner/__init__.py index 3ac10ee3..bb1bf30c 100644 --- a/httpobs/scanner/__init__.py +++ b/httpobs/scanner/__init__.py @@ -1,24 +1,3 @@ -# Current algorithm version -ALGORITHM_VERSION = 2 +from .scanner import scan -# The various statuses -STATE_ABORTED = 'ABORTED' -STATE_FAILED = 'FAILED' -STATE_FINISHED = 'FINISHED' -STATE_PENDING = 'PENDING' -STATE_STARTING = 'STARTING' -STATE_RUNNING = 'RUNNING' - -STATES = (STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_PENDING, STATE_RUNNING, STATE_STARTING) - -__all__ = [ - 'ALGORITHM_VERSION', - 'NUM_TESTS', - 'STATES', - 'STATE_ABORTED', - 'STATE_FAILED', - 'STATE_FINISHED', - 'STATE_PENDING', - 'STATE_RUNNING', - 'STATE_STARTING', -] +__all__ = ["scan"] diff --git a/httpobs/scanner/analyzer/__init__.py b/httpobs/scanner/analyzer/__init__.py index 2c549427..7f3e6234 100644 --- a/httpobs/scanner/analyzer/__init__.py +++ b/httpobs/scanner/analyzer/__init__.py @@ -1,13 +1,17 @@ from .content import contribute, subresource_integrity -from .headers import (content_security_policy, cookies, public_key_pinning, referrer_policy, strict_transport_security, - x_content_type_options, x_xss_protection, x_frame_options) +from .headers import ( + content_security_policy, + cookies, + public_key_pinning, + referrer_policy, + strict_transport_security, + x_content_type_options, + x_frame_options, + x_xss_protection, +) from .misc import cross_origin_resource_sharing, redirection -__all__ = [ - 'NUM_TESTS', - 'tests', - 'TEST_NAMES' -] +__all__ = ['NUM_TESTS', 'tests', 'TEST_NAMES'] tests = ( content_security_policy, diff --git a/httpobs/scanner/analyzer/content.py b/httpobs/scanner/analyzer/content.py index a8db7bef..6b0e060b 100644 --- a/httpobs/scanner/analyzer/content.py +++ b/httpobs/scanner/analyzer/content.py @@ -1,16 +1,14 @@ +import json +from urllib.parse import urlparse + from bs4 import BeautifulSoup as bs from publicsuffixlist import PublicSuffixList -from urllib.parse import urlparse from httpobs.conf import SCANNER_MOZILLA_DOMAINS from httpobs.scanner.analyzer.decorators import scored_test from httpobs.scanner.analyzer.utils import only_if_worse from httpobs.scanner.retriever.retriever import HTML_TYPES - -import json - - # Compat between Python 3.4 and Python 3.5 (see: https://github.com/mozilla/http-observatory-website/issues/14) if not hasattr(json, 'JSONDecodeError'): # pragma: no cover json.JSONDecodeError = ValueError @@ -124,12 +122,14 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external- response = reqs['responses']['auto'] # The order of how "good" the results are - goodness = ['sri-implemented-and-all-scripts-loaded-securely', - 'sri-implemented-and-external-scripts-loaded-securely', - 
diff --git a/httpobs/scanner/analyzer/content.py b/httpobs/scanner/analyzer/content.py
index a8db7bef..6b0e060b 100644
--- a/httpobs/scanner/analyzer/content.py
+++ b/httpobs/scanner/analyzer/content.py
@@ -1,16 +1,14 @@
+import json
+from urllib.parse import urlparse
+
 from bs4 import BeautifulSoup as bs
 from publicsuffixlist import PublicSuffixList
-from urllib.parse import urlparse
 
 from httpobs.conf import SCANNER_MOZILLA_DOMAINS
 from httpobs.scanner.analyzer.decorators import scored_test
 from httpobs.scanner.analyzer.utils import only_if_worse
 from httpobs.scanner.retriever.retriever import HTML_TYPES
 
-import json
-
-
 # Compat between Python 3.4 and Python 3.5 (see: https://github.com/mozilla/http-observatory-website/issues/14)
 if not hasattr(json, 'JSONDecodeError'):  # pragma: no cover
     json.JSONDecodeError = ValueError
@@ -124,12 +122,14 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external-
     response = reqs['responses']['auto']
 
     # The order of how "good" the results are
-    goodness = ['sri-implemented-and-all-scripts-loaded-securely',
-                'sri-implemented-and-external-scripts-loaded-securely',
-                'sri-implemented-but-external-scripts-not-loaded-securely',
-                'sri-not-implemented-but-external-scripts-loaded-securely',
-                'sri-not-implemented-and-external-scripts-not-loaded-securely',
-                'sri-not-implemented-response-not-html']
+    goodness = [
+        'sri-implemented-and-all-scripts-loaded-securely',
+        'sri-implemented-and-external-scripts-loaded-securely',
+        'sri-implemented-but-external-scripts-not-loaded-securely',
+        'sri-not-implemented-but-external-scripts-loaded-securely',
+        'sri-not-implemented-and-external-scripts-not-loaded-securely',
+        'sri-not-implemented-response-not-html',
+    ]
 
     # If the content isn't HTML, there's no scripts to load; this is okay
     if response.headers.get('Content-Type', '').split(';')[0] not in HTML_TYPES:
@@ -158,8 +158,11 @@
             # Check to see if they're on the same second-level domain
             # TODO: update the PSL list on startup
             psl = PublicSuffixList()
-            samesld = True if (psl.privatesuffix(urlparse(response.url).netloc) ==
-                               psl.privatesuffix(src.netloc)) else False
+            samesld = (
+                True
+                if (psl.privatesuffix(urlparse(response.url).netloc) == psl.privatesuffix(src.netloc))
+                else False
+            )
 
             if src.scheme == '':
                 if src.netloc == '':
@@ -189,29 +192,28 @@
             # Add it to the scripts data result, if it's not a relative URI
             if not secureorigin:
-                output['data'][script['src']] = {
-                    'crossorigin': crossorigin,
-                    'integrity': integrity
-                }
+                output['data'][script['src']] = {'crossorigin': crossorigin, 'integrity': integrity}
 
                 if integrity and not securescheme:
-                    output['result'] = only_if_worse('sri-implemented-but-external-scripts-not-loaded-securely',
-                                                     output['result'],
-                                                     goodness)
+                    output['result'] = only_if_worse(
+                        'sri-implemented-but-external-scripts-not-loaded-securely', output['result'], goodness
+                    )
                 elif not integrity and securescheme:
-                    output['result'] = only_if_worse('sri-not-implemented-but-external-scripts-loaded-securely',
-                                                     output['result'],
-                                                     goodness)
+                    output['result'] = only_if_worse(
+                        'sri-not-implemented-but-external-scripts-loaded-securely', output['result'], goodness
+                    )
                 elif not integrity and not securescheme and samesld:
-                    output['result'] = only_if_worse('sri-not-implemented-and-external-scripts'
-                                                     '-not-loaded-securely',
-                                                     output['result'],
-                                                     goodness)
+                    output['result'] = only_if_worse(
+                        'sri-not-implemented-and-external-scripts' '-not-loaded-securely',
+                        output['result'],
+                        goodness,
+                    )
                 elif not integrity and not securescheme:
-                    output['result'] = only_if_worse('sri-not-implemented-and-external-scripts'
-                                                     '-not-loaded-securely',
-                                                     output['result'],
-                                                     goodness)
+                    output['result'] = only_if_worse(
+                        'sri-not-implemented-and-external-scripts' '-not-loaded-securely',
+                        output['result'],
+                        goodness,
+                    )
 
             # Grant bonus even if they use SRI on the same origin
             else:
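
`only_if_worse()` comes from `httpobs.scanner.analyzer.utils` and is not shown in this diff; a minimal sketch of the semantics implied by its call sites (an assumption — earlier entries in the ordering list are better):

```python
def only_if_worse(new_result: str, old_result: str, order: list) -> str:
    # Replace the current result only if the new one ranks worse
    # (appears later) in the given ordering.
    if not old_result:
        return new_result
    elif order.index(new_result) > order.index(old_result):
        return new_result
    else:
        return old_result
```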
@@ -228,20 +230,22 @@
     # If the page loaded from a foreign origin, but everything included SRI
     elif scripts and scripts_on_foreign_origin and not output['result']:
-        output['result'] = only_if_worse('sri-implemented-and-external-scripts-loaded-securely',
-                                         output['result'],
-                                         goodness)
+        output['result'] = only_if_worse(
+            'sri-implemented-and-external-scripts-loaded-securely', output['result'], goodness
+        )
 
     # Code defensively on the size of the data
     output['data'] = output['data'] if len(str(output['data'])) < 32768 else {}
 
     # Check to see if the test passed or failed
-    if output['result'] in ('sri-implemented-and-all-scripts-loaded-securely',
-                            'sri-implemented-and-external-scripts-loaded-securely',
-                            'sri-not-implemented-response-not-html',
-                            'sri-not-implemented-but-all-scripts-loaded-from-secure-origin',
-                            'sri-not-implemented-but-no-scripts-loaded',
-                            expectation):
+    if output['result'] in (
+        'sri-implemented-and-all-scripts-loaded-securely',
+        'sri-implemented-and-external-scripts-loaded-securely',
+        'sri-not-implemented-response-not-html',
+        'sri-not-implemented-but-all-scripts-loaded-from-secure-origin',
+        'sri-not-implemented-but-no-scripts-loaded',
+        expectation,
+    ):
         output['pass'] = True
 
     return output
diff --git a/httpobs/scanner/analyzer/headers.py b/httpobs/scanner/analyzer/headers.py
index 1bc69b9a..8886739f 100644
--- a/httpobs/scanner/analyzer/headers.py
+++ b/httpobs/scanner/analyzer/headers.py
@@ -6,7 +6,6 @@
 from httpobs.scanner.analyzer.utils import is_hpkp_preloaded, is_hsts_preloaded, only_if_worse
 from httpobs.scanner.retriever import get_duplicate_header_values
 
-
 # Ignore the CloudFlare __cfduid tracking cookies. They *are* actually bad, but it is out of a site's
 # control. See https://github.com/mozilla/http-observatory/issues/121 for additional details. Hopefully
 # this will eventually be fixed on CloudFlare's end.
@@ -78,23 +77,29 @@ def __parse_csp(csp_strings: list) -> Dict[str, Set]:
                     # we have to do this to make the domain lowercase for comparisons later
                     url = urlparse(source)
                     url = url._replace(netloc=url.netloc.lower())
-                    values.append({
-                        'source': urlunparse(url),
-                        'index': policy_index,
-                        'keep': True if policy_index == 0 else False,
-                    })
+                    values.append(
+                        {
+                            'source': urlunparse(url),
+                            'index': policy_index,
+                            'keep': True if policy_index == 0 else False,
+                        }
+                    )
                 else:
-                    values.append({
-                        'source': source.lower(),
-                        'index': policy_index,
-                        'keep': True if policy_index == 0 else False,
-                    })
+                    values.append(
+                        {
+                            'source': source.lower(),
+                            'index': policy_index,
+                            'keep': True if policy_index == 0 else False,
+                        }
+                    )
             elif len(entry) == 1 and directive.endswith("-src"):
                 # if it's a source list with no values, it's 'none'
-                values = [{
-                    'source': "'none'",
-                    'index': policy_index,
-                    'keep': True if policy_index == 0 else False,
-                }]
+                values = [
+                    {
+                        'source': "'none'",
+                        'index': policy_index,
+                        'keep': True if policy_index == 0 else False,
+                    }
+                ]
             else:
                 values = []
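
`__parse_csp()` ultimately reduces each policy to a directive-to-sources mapping, which is what the checks in `content_security_policy()` below rely on (e.g. `csp.get('default-src') == {"'none'"}`). An illustrative value — an assumption about the final shape, not taken from a real scan:

```python
# Rough shape of a parsed policy for
# "default-src 'none'; script-src 'self' https://example.com; frame-ancestors 'none'"
csp = {
    'default-src': {"'none'"},
    'script-src': {"'self'", 'https://example.com'},
    'frame-ancestors': {"'none'"},
}
```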
@@ -167,8 +172,8 @@ def content_security_policy(reqs: dict, expectation='csp-implemented-with-no-uns
     output = {
         'data': None,
         'expectation': expectation,
-        'http': False,   # whether an HTTP header was available
-        'meta': False,   # whether an HTTP meta-equiv was available
+        'http': False,  # whether an HTTP header was available
+        'meta': False,  # whether an HTTP meta-equiv was available
         'pass': False,
         'policy': None,
         'result': None,
@@ -251,73 +256,79 @@
     # 3. Remove 'self' and 'unsafe-inline'
     if any(source.startswith(NONCES_HASHES) for source in script_src) and '\'strict-dynamic\'' in script_src:
         for source in set(script_src):
-            if (source.startswith(DANGEROUSLY_BROAD) or
-                    source == '\'self\'' or
-                    source == '\'unsafe-inline\''):
+            if source.startswith(DANGEROUSLY_BROAD) or source == '\'self\'' or source == '\'unsafe-inline\'':
                 script_src.remove(source)
 
         output['policy']['strictDynamic'] = True
     # 'strict-dynamic' in script-src without hash or nonce
     elif '\'strict-dynamic\'' in script_src:
-        output['result'] = ('csp-header-invalid' if output['result'] is None
-                            else output['result'])
+        output['result'] = 'csp-header-invalid' if output['result'] is None else output['result']
 
     # Some checks look only at active/passive CSP directives
     # This could be inlined, but the code is quite hard to read at that point
-    active_csp_sources = [source for directive, source_list in csp.items() for source in source_list if
-                          directive not in PASSIVE_DIRECTIVES and directive not in 'script-src'] + list(script_src)
-    passive_csp_sources = [source for source_list in
-                           [csp.get(directive, csp.get('default-src', [])) for directive in PASSIVE_DIRECTIVES]
-                           for source in source_list]
+    active_csp_sources = [
+        source
+        for directive, source_list in csp.items()
+        for source in source_list
+        if directive not in PASSIVE_DIRECTIVES and directive not in 'script-src'
+    ] + list(script_src)
+    passive_csp_sources = [
+        source
+        for source_list in [csp.get(directive, csp.get('default-src', [])) for directive in PASSIVE_DIRECTIVES]
+        for source in source_list
+    ]
 
     # No 'unsafe-inline' or data: in script-src
     # Also don't allow overly broad schemes such as https: in either object-src or script-src
     # Likewise, if you don't have object-src or script-src defined, then all sources are allowed
-    if (script_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE) or
-            object_src.intersection(DANGEROUSLY_BROAD)):
-        output['result'] = ('csp-implemented-with-unsafe-inline' if output['result'] is None
-                            else output['result'])
+    if script_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE) or object_src.intersection(DANGEROUSLY_BROAD):
+        output['result'] = 'csp-implemented-with-unsafe-inline' if output['result'] is None else output['result']
         output['policy']['unsafeInline'] = True
 
     # If the site is https, it shouldn't allow any http: as a source (active content)
-    if (urlparse(response.url).scheme == 'https' and
-            [source for source in active_csp_sources if 'http:' in source or 'ftp:' in source] and
-            not output['policy']['strictDynamic']):
-        output['result'] = ('csp-implemented-with-insecure-scheme' if output['result'] is None
-                            else output['result'])
+    if (
+        urlparse(response.url).scheme == 'https'
+        and [source for source in active_csp_sources if 'http:' in source or 'ftp:' in source]
+        and not output['policy']['strictDynamic']
+    ):
+        output['result'] = 'csp-implemented-with-insecure-scheme' if output['result'] is None else output['result']
         output['policy']['insecureSchemeActive'] = True
 
     # Don't allow 'unsafe-eval' in script-src or style-src
     if script_src.union(style_src).intersection({'\'unsafe-eval\''}):
-        output['result'] = ('csp-implemented-with-unsafe-eval' if output['result'] is None
-                            else output['result'])
+        output['result'] = 'csp-implemented-with-unsafe-eval' if output['result'] is None else output['result']
         output['policy']['unsafeEval'] = True
 
     # If the site is https, it shouldn't allow any http: as a source (passive content)
-    if (urlparse(response.url).scheme == 'https' and
-            [source for source in passive_csp_sources if 'http:' in source or 'ftp:' in source]):
-        output['result'] = ('csp-implemented-with-insecure-scheme-in-passive-content-only' if output['result'] is None
-                            else output['result'])
+    if urlparse(response.url).scheme == 'https' and [
+        source for source in passive_csp_sources if 'http:' in source or 'ftp:' in source
+    ]:
+        output['result'] = (
+            'csp-implemented-with-insecure-scheme-in-passive-content-only'
+            if output['result'] is None
+            else output['result']
+        )
         output['policy']['insecureSchemePassive'] = True
 
     # Don't allow 'unsafe-inline', data:, or overly broad sources in style-src
     if style_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE):
-        output['result'] = ('csp-implemented-with-unsafe-inline-in-style-src-only' if output['result'] is None
-                            else output['result'])
+        output['result'] = (
+            'csp-implemented-with-unsafe-inline-in-style-src-only' if output['result'] is None else output['result']
+        )
         output['policy']['unsafeInlineStyle'] = True
 
     # Only if default-src is 'none' and 'none' alone, since additional uris override 'none'
     if csp.get('default-src') == {'\'none\''}:
-        output['result'] = ('csp-implemented-with-no-unsafe-default-src-none' if output['result'] is None
-                            else output['result'])
+        output['result'] = (
+            'csp-implemented-with-no-unsafe-default-src-none' if output['result'] is None else output['result']
+        )
         output['policy']['defaultNone'] = True
     else:
-        output['result'] = ('csp-implemented-with-no-unsafe' if output['result'] is None
-                            else output['result'])
+        output['result'] = 'csp-implemented-with-no-unsafe' if output['result'] is None else output['result']
 
     # Some other checks for the CSP analyzer
-    output['policy']['antiClickjacking'] = (not bool(frame_ancestors.intersection(DANGEROUSLY_BROAD)))
+    output['policy']['antiClickjacking'] = not bool(frame_ancestors.intersection(DANGEROUSLY_BROAD))
     output['policy']['insecureBaseUri'] = bool(base_uri.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE))
-    output['policy']['insecureFormAction'] = (bool(form_action.intersection(DANGEROUSLY_BROAD)))
+    output['policy']['insecureFormAction'] = bool(form_action.intersection(DANGEROUSLY_BROAD))
     output['policy']['unsafeObjects'] = bool(object_src.intersection(DANGEROUSLY_BROAD))
 
     # Once we're done, convert every set() in csp to an array
@@ -329,10 +340,12 @@
     output['data'] = csp if len(str(csp)) < 32768 else {}
 
     # Check to see if the test passed or failed
-    if output['result'] in (expectation,
-                            'csp-implemented-with-no-unsafe-default-src-none',
-                            'csp-implemented-with-unsafe-inline-in-style-src-only',
-                            'csp-implemented-with-insecure-scheme-in-passive-content-only'):
+    if output['result'] in (
+        expectation,
+        'csp-implemented-with-no-unsafe-default-src-none',
+        'csp-implemented-with-unsafe-inline-in-style-src-only',
+        'csp-implemented-with-insecure-scheme-in-passive-content-only',
+    ):
         output['pass'] = True
 
     return output
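
For reference, an illustrative `output['policy']` for a strict policy such as `default-src 'none'; base-uri 'none'; form-action 'none'; frame-ancestors 'none'` — the flag values below are inferred from the checks above, not taken from a real scan:

```python
policy = {
    'antiClickjacking': True,  # frame-ancestors blocks dangerously broad sources
    'defaultNone': True,       # default-src is exactly 'none'
    'insecureBaseUri': False,
    'insecureFormAction': False,
    'insecureSchemeActive': False,
    'insecureSchemePassive': False,
    'strictDynamic': False,
    'unsafeEval': False,
    'unsafeInline': False,
    'unsafeInlineStyle': False,
    'unsafeObjects': False,
}
```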
@@ -374,13 +387,15 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') ->
     session = reqs['session']  # all requests and their associated cookies
 
     # The order of how bad the various results are
-    goodness = ['cookies-without-secure-flag-but-protected-by-hsts',
-                'cookies-without-secure-flag',
-                'cookies-session-without-secure-flag-but-protected-by-hsts',
-                'cookies-samesite-flag-invalid',
-                'cookies-anticsrf-without-samesite-flag',
-                'cookies-session-without-httponly-flag',
-                'cookies-session-without-secure-flag']
+    goodness = [
+        'cookies-without-secure-flag-but-protected-by-hsts',
+        'cookies-without-secure-flag',
+        'cookies-session-without-secure-flag-but-protected-by-hsts',
+        'cookies-samesite-flag-invalid',
+        'cookies-anticsrf-without-samesite-flag',
+        'cookies-session-without-httponly-flag',
+        'cookies-session-without-secure-flag',
+    ]
 
     # TODO: Support cookies set over http-equiv (ugh)
     # https://github.com/mozilla/http-observatory/issues/265
@@ -398,7 +413,7 @@
     # There are certain cookies we ignore, because they are set by service providers and sites have
     # no control over them.
     for cookie in COOKIES_TO_DELETE:
-        del(session.cookies[cookie])
+        del session.cookies[cookie]
 
     for cookie in session.cookies:
         # The HttpOnly and SameSite functionality is a bit broken
@@ -415,54 +430,44 @@
             elif samesiteVal.strip().lower() == 'none':
                 cookie.samesite = 'None'
             else:
-                output['result'] = only_if_worse('cookies-samesite-flag-invalid',
-                                                 output['result'],
-                                                 goodness)
+                output['result'] = only_if_worse('cookies-samesite-flag-invalid', output['result'], goodness)
 
         # Add it to the jar
-        jar[cookie.name] = {i: getattr(cookie, i, None) for i in ['domain', 'expires', 'httponly',
-                                                                  'max-age', 'path', 'port', 'samesite', 'secure']}
+        jar[cookie.name] = {
+            i: getattr(cookie, i, None)
+            for i in ['domain', 'expires', 'httponly', 'max-age', 'path', 'port', 'samesite', 'secure']
+        }
 
         # Is it a session identifier or an anti-csrf token?
         sessionid = any(i in cookie.name.lower() for i in ('login', 'sess'))
         anticsrf = True if 'csrf' in cookie.name.lower() else False
 
         if not cookie.secure and cookie.samesite == 'None':
-            output['result'] = only_if_worse('cookies-samesite-flag-invalid',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse('cookies-samesite-flag-invalid', output['result'], goodness)
 
         if not cookie.secure and hsts:
-            output['result'] = only_if_worse('cookies-without-secure-flag-but-protected-by-hsts',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse(
+                'cookies-without-secure-flag-but-protected-by-hsts', output['result'], goodness
+            )
         elif not cookie.secure:
-            output['result'] = only_if_worse('cookies-without-secure-flag',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse('cookies-without-secure-flag', output['result'], goodness)
 
         # Anti-CSRF tokens should be set using the SameSite option
         if anticsrf and not cookie.samesite:
-            output['result'] = only_if_worse('cookies-anticsrf-without-samesite-flag',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse('cookies-anticsrf-without-samesite-flag', output['result'], goodness)
 
         # Login and session cookies should be set with Secure
         if sessionid and not cookie.secure and hsts:
-            output['result'] = only_if_worse('cookies-session-without-secure-flag-but-protected-by-hsts',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse(
+                'cookies-session-without-secure-flag-but-protected-by-hsts', output['result'], goodness
+            )
         elif sessionid and not cookie.secure:
-            output['result'] = only_if_worse('cookies-session-without-secure-flag',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse('cookies-session-without-secure-flag', output['result'], goodness)
 
         # Login and session cookies should be set with HttpOnly
         if sessionid and not cookie.httponly:
-            output['result'] = only_if_worse('cookies-session-without-httponly-flag',
-                                             output['result'],
-                                             goodness)
+            output['result'] = only_if_worse('cookies-session-without-httponly-flag', output['result'], goodness)
 
     # Store whether or not we saw SameSite cookies, if cookies were set
     if output['result'] is None:
@@ -477,9 +482,7 @@
     output['data'] = jar if len(str(jar)) < 32768 else {}
 
     # Check to see if the test passed or failed
-    if output['result'] in ('cookies-not-found',
-                            'cookies-secure-with-httponly-sessions-and-samesite',
-                            expectation):
+    if output['result'] in ('cookies-not-found', 'cookies-secure-with-httponly-sessions-and-samesite', expectation):
         output['pass'] = True
 
     return output
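
Each cookie lands in `output['data']` keyed by name, with the attribute dict built by the comprehension above. An illustrative entry for a well-configured session cookie (the values are made up):

```python
jar = {
    'sessionid': {
        'domain': '.mozilla.org',
        'expires': None,  # session cookie
        'httponly': True,
        'max-age': None,
        'path': '/',
        'port': None,
        'samesite': 'Strict',
        'secure': True,
    },
}
```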
output['result'] = only_if_worse('cookies-session-without-httponly-flag', output['result'], goodness) # Store whether or not we saw SameSite cookies, if cookies were set if output['result'] is None: @@ -477,9 +482,7 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') -> output['data'] = jar if len(str(jar)) < 32768 else {} # Check to see if the test passed or failed - if output['result'] in ('cookies-not-found', - 'cookies-secure-with-httponly-sessions-and-samesite', - expectation): + if output['result'] in ('cookies-not-found', 'cookies-secure-with-httponly-sessions-and-samesite', expectation): output['pass'] = True return output @@ -594,20 +597,15 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: output = { 'data': None, 'expectation': expectation, - 'http': False, # whether an HTTP header was available - 'meta': False, # whether an HTTP meta-equiv was available + 'http': False, # whether an HTTP header was available + 'meta': False, # whether an HTTP meta-equiv was available 'pass': False, 'result': None, } - goodness = ['no-referrer', - 'same-origin', - 'strict-origin', - 'strict-origin-when-cross-origin'] + goodness = ['no-referrer', 'same-origin', 'strict-origin', 'strict-origin-when-cross-origin'] - badness = ['origin', - 'origin-when-cross-origin', - 'unsafe-url'] + badness = ['origin', 'origin-when-cross-origin', 'unsafe-url'] valid = goodness + badness + ['no-referrer-when-downgrade'] @@ -619,8 +617,9 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: # If it's in both a header and http-equiv, http-equiv gets precedence (aka comes last) if 'Referrer-Policy' in response.headers and 'Referrer-Policy' in response.http_equiv: - output['data'] = ', '.join([response.headers['Referrer-Policy'], - response.http_equiv['Referrer-Policy']])[0:256] # Code defensively + output['data'] = ', '.join([response.headers['Referrer-Policy'], response.http_equiv['Referrer-Policy']])[ + 0:256 + ] # Code defensively elif 'Referrer-Policy' in response.headers or 'Referrer-Policy' in response.http_equiv: output['data'] = (response.http_equiv.get('Referrer-Policy') or response.headers.get('Referrer-Policy'))[0:256] else: @@ -642,10 +641,12 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: output['result'] = 'referrer-policy-header-invalid' # Test passed or failed - if output['result'] in ('referrer-policy-private', - 'referrer-policy-not-implemented', - 'referrer-policy-no-referrer-when-downgrade', - expectation): + if output['result'] in ( + 'referrer-policy-private', + 'referrer-policy-not-implemented', + 'referrer-policy-no-referrer-when-downgrade', + expectation, + ): output['pass'] = True return output @@ -732,9 +733,7 @@ def strict_transport_security(reqs: dict, expectation='hsts-implemented-max-age- output['preloaded'] = True # Check to see if the test passed or failed - if output['result'] in ('hsts-implemented-max-age-at-least-six-months', - 'hsts-preloaded', - expectation): + if output['result'] in ('hsts-implemented-max-age-at-least-six-months', 'hsts-preloaded', expectation): output['pass'] = True return output @@ -825,10 +824,12 @@ def x_frame_options(reqs: dict, expectation='x-frame-options-sameorigin-or-deny' output['result'] = 'x-frame-options-implemented-via-csp' # Check to see if the test passed or failed - if output['result'] in ('x-frame-options-allow-from-origin', - 'x-frame-options-sameorigin-or-deny', - 'x-frame-options-implemented-via-csp', - expectation): + 
if output['result'] in ( + 'x-frame-options-allow-from-origin', + 'x-frame-options-sameorigin-or-deny', + 'x-frame-options-implemented-via-csp', + expectation, + ): output['pass'] = True return output @@ -862,7 +863,7 @@ def x_xss_protection(reqs: dict, expectation='x-xss-protection-1-mode-block') -> } enabled = False # XXSSP enabled or not - valid = True # XXSSP header valid or not + valid = True # XXSSP header valid or not response = reqs['responses']['auto'] header = response.headers.get('X-XSS-Protection', '').strip() xxssp = {} diff --git a/httpobs/scanner/analyzer/misc.py b/httpobs/scanner/analyzer/misc.py index 20fd0feb..009de542 100644 --- a/httpobs/scanner/analyzer/misc.py +++ b/httpobs/scanner/analyzer/misc.py @@ -1,6 +1,7 @@ -from bs4 import BeautifulSoup as bs from urllib.parse import urlparse +from bs4 import BeautifulSoup as bs + from httpobs.scanner.analyzer.decorators import scored_test from httpobs.scanner.analyzer.utils import is_hsts_preloaded @@ -17,8 +18,9 @@ def __parse_acao_xml_get_domains(xml, type='crossdomain') -> list: # Parse the files if type == 'crossdomain': - return [domains.get('domain').strip() - for domains in soup.find_all('allow-access-from') if domains.get('domain')] + return [ + domains.get('domain').strip() for domains in soup.find_all('allow-access-from') if domains.get('domain') + ] elif type == 'clientaccesspolicy': return [domains.get('uri').strip() for domains in soup.find_all('domain') if domains.get('uri')] @@ -40,11 +42,7 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource result: short string describing the result of the test """ output = { - 'data': { - 'acao': None, - 'clientaccesspolicy': None, - 'crossdomain': None - }, + 'data': {'acao': None, 'clientaccesspolicy': None, 'crossdomain': None}, 'expectation': expectation, 'pass': False, 'result': 'cross-origin-resource-sharing-not-implemented', @@ -59,8 +57,10 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource if output['data']['acao'] == '*': output['result'] = 'cross-origin-resource-sharing-implemented-with-public-access' - elif (acao.request.headers.get('Origin') == acao.headers['Access-Control-Allow-Origin'] and - acao.headers.get('Access-Control-Allow-Credentials', '').lower().strip() == 'true'): + elif ( + acao.request.headers.get('Origin') == acao.headers['Access-Control-Allow-Origin'] + and acao.headers.get('Access-Control-Allow-Credentials', '').lower().strip() == 'true' + ): output['result'] = 'cross-origin-resource-sharing-implemented-with-universal-access' else: output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access' @@ -88,9 +88,11 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access' # Check to see if the test passed or failed - if output['result'] in ('cross-origin-resource-sharing-implemented-with-public-access', - 'cross-origin-resource-sharing-implemented-with-restricted-access', - expectation): + if output['result'] in ( + 'cross-origin-resource-sharing-implemented-with-public-access', + 'cross-origin-resource-sharing-implemented-with-restricted-access', + expectation, + ): output['pass'] = True return output @@ -165,8 +167,7 @@ def redirection(reqs: dict, expectation='redirection-to-https') -> dict: # If it's an http -> https redirection, make sure it redirects to the same host. 
If that's not done, then # HSTS cannot be properly set on the original host # TODO: Check for redirections like: http://www.example.com -> https://example.com -> https://www.example.com - elif (route[0].scheme == 'http' and route[1].scheme == 'https' and - route[0].hostname != route[1].hostname): + elif route[0].scheme == 'http' and route[1].scheme == 'https' and route[0].hostname != route[1].hostname: output['result'] = 'redirection-off-host-from-http' output['status_code'] = response.history[-1].status_code else: @@ -177,9 +178,7 @@ def redirection(reqs: dict, expectation='redirection-to-https') -> dict: output['status_code'] = output['status_code'] if len(str(output['status_code'])) < 5 else None # Check to see if the test passed or failed - if output['result'] in ('redirection-not-needed-no-http', - 'redirection-all-redirects-preloaded', - expectation): + if output['result'] in ('redirection-not-needed-no-http', 'redirection-all-redirects-preloaded', expectation): output['pass'] = True return output diff --git a/httpobs/scanner/analyzer/utils.py b/httpobs/scanner/analyzer/utils.py index 102de2b6..3f20e847 100644 --- a/httpobs/scanner/analyzer/utils.py +++ b/httpobs/scanner/analyzer/utils.py @@ -1,7 +1,6 @@ import json import os.path - # Load the HSTS list from disk __dirname = os.path.abspath(os.path.dirname(__file__)) __filename = os.path.join(__dirname, '..', '..', 'conf', 'hsts-preload.json') diff --git a/httpobs/scanner/celeryconfig.py b/httpobs/scanner/celeryconfig.py deleted file mode 100644 index fde3868a..00000000 --- a/httpobs/scanner/celeryconfig.py +++ /dev/null @@ -1,14 +0,0 @@ -from httpobs.conf import BROKER_URL - - -# Set the Celery task queue -BROKER_URL = BROKER_URL - -CELERY_ACCEPT_CONTENT = ['json'] -CELERY_IGNORE_RESULTS = True -CELERY_REDIRECT_STDOUTS_LEVEL = 'WARNING' -CELERY_RESULT_SERIALIZER = 'json' -CELERY_TASK_SERIALIZER = 'json' - -CELERYD_TASK_SOFT_TIME_LIMIT = 751 -CELERYD_TASK_TIME_LIMIT = 1129 diff --git a/httpobs/scanner/grader/__init__.py b/httpobs/scanner/grader/__init__.py index c42f32de..2279e29a 100644 --- a/httpobs/scanner/grader/__init__.py +++ b/httpobs/scanner/grader/__init__.py @@ -1,12 +1,15 @@ -from .grade import (get_score_description, - get_score_modifier, - get_grade_and_likelihood_for_score, - GRADES, - MINIMUM_SCORE_FOR_EXTRA_CREDIT) +from .grade import ( + GRADES, + MINIMUM_SCORE_FOR_EXTRA_CREDIT, + get_grade_and_likelihood_for_score, + get_score_description, + get_score_modifier, +) - -__all__ = ['get_score_description', - 'get_score_modifier', - 'get_grade_and_likelihood_for_score', - 'GRADES', - 'MINIMUM_SCORE_FOR_EXTRA_CREDIT'] +__all__ = [ + 'get_score_description', + 'get_score_modifier', + 'get_grade_and_likelihood_for_score', + 'GRADES', + 'MINIMUM_SCORE_FOR_EXTRA_CREDIT', +] diff --git a/httpobs/scanner/grader/grade.py b/httpobs/scanner/grader/grade.py index 0bb53162..e5095852 100644 --- a/httpobs/scanner/grader/grade.py +++ b/httpobs/scanner/grader/grade.py @@ -19,20 +19,14 @@ 15: 'F', 10: 'F', 5: 'F', - 0: 'F' + 0: 'F', } # See https://wiki.mozilla.org/Security/Standard_Levels for a definition of the risk levels # We cannot make an accurate decision on HIGH and MAXIMUM risk likelihood indicators with the current checks, # thus the likelihood indicator is currently at best (or worse) MEDIUM. Modifiers (A-A+B+B-, ... are normalized # A,B, ...) in the calling function. 
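To make that normalization concrete, here is a minimal sketch (not the project's verbatim implementation) of how a final score could be mapped onto the two charts in grade.py; the [0, 100] clamp and the 5-point bucketing are assumptions about how get_grade_and_likelihood_for_score treats out-of-range and in-between scores:

```python
from httpobs.scanner.grader.grade import GRADES, LIKELIHOOD_INDICATOR_CHART


def grade_and_likelihood(score: int) -> tuple[str, str]:
    """Illustrative sketch: clamp the score, bucket it to the nearest 5,
    then look up the letter grade and its likelihood indicator."""
    clamped = max(0, min(score, 100))      # assumed clamp to the chart's range
    grade = GRADES[clamped - clamped % 5]  # e.g. 83 -> GRADES[80]
    # grade modifiers (A+, A-, B+, ...) normalize to their base letter
    return grade, LIKELIHOOD_INDICATOR_CHART[grade[0]]
```

Under those assumptions a score of 83 grades out as a B with a MEDIUM likelihood indicator, and only scores in the A range can ever report LOW.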
-LIKELIHOOD_INDICATOR_CHART = { - 'A': 'LOW', - 'B': 'MEDIUM', - 'C': 'MEDIUM', - 'D': 'MEDIUM', - 'F': 'MEDIUM' -} +LIKELIHOOD_INDICATOR_CHART = {'A': 'LOW', 'B': 'MEDIUM', 'C': 'MEDIUM', 'D': 'MEDIUM', 'F': 'MEDIUM'} # The minimum required score to receive extra credit MINIMUM_SCORE_FOR_EXTRA_CREDIT = 90 @@ -61,7 +55,6 @@ 'description': 'Contribute.json file cannot be parsed', 'modifier': -10, }, - # CSP 'csp-implemented-with-no-unsafe-default-src-none': { 'description': 'Content Security Policy (CSP) implemented with default-src \'none\' and no \'unsafe\'', @@ -72,13 +65,17 @@ 'modifier': 5, }, 'csp-implemented-with-unsafe-inline-in-style-src-only': { - 'description': ('Content Security Policy (CSP) implemented with unsafe sources inside style-src. ' - 'This includes \'unsafe-inline\', data: or overly broad sources such as https:.'), + 'description': ( + 'Content Security Policy (CSP) implemented with unsafe sources inside style-src. ' + 'This includes \'unsafe-inline\', data: or overly broad sources such as https:.' + ), 'modifier': 0, }, 'csp-implemented-with-insecure-scheme-in-passive-content-only': { - 'description': ('Content Security Policy (CSP) implemented, ' - 'but secure site allows images or media to be loaded over HTTP'), + 'description': ( + 'Content Security Policy (CSP) implemented, ' + 'but secure site allows images or media to be loaded over HTTP' + ), 'modifier': -10, }, 'csp-implemented-with-unsafe-eval': { @@ -86,15 +83,18 @@ 'modifier': -10, }, 'csp-implemented-with-unsafe-inline': { - 'description': ('Content Security Policy (CSP) implemented unsafely. ' - 'This includes \'unsafe-inline\' or data: inside script-src, ' - 'overly broad sources such as https: inside object-src or script-src, ' - 'or not restricting the sources for object-src or script-src.'), + 'description': ( + 'Content Security Policy (CSP) implemented unsafely. ' + 'This includes \'unsafe-inline\' or data: inside script-src, ' + 'overly broad sources such as https: inside object-src or script-src, ' + 'or not restricting the sources for object-src or script-src.' 
+ ), 'modifier': -20, }, 'csp-implemented-with-insecure-scheme': { - 'description': ('Content Security Policy (CSP) implemented, ' - 'but secure site allows resources to be loaded over HTTP'), + 'description': ( + 'Content Security Policy (CSP) implemented, ' 'but secure site allows resources to be loaded over HTTP' + ), 'modifier': -20, }, 'csp-header-invalid': { @@ -105,13 +105,13 @@ 'description': 'Content Security Policy (CSP) header not implemented', 'modifier': -25, }, - # Cookies 'cookies-secure-with-httponly-sessions-and-samesite': { - 'description': ('All cookies use the Secure flag, session cookies use the HttpOnly flag, and cross-origin ' - 'restrictions are in place via the SameSite flag'), + 'description': ( + 'All cookies use the Secure flag, session cookies use the HttpOnly flag, and cross-origin ' + 'restrictions are in place via the SameSite flag' + ), 'modifier': 5, - }, 'cookies-secure-with-httponly-sessions': { 'description': 'All cookies use the Secure flag and all session cookies use the HttpOnly flag', @@ -149,27 +149,28 @@ 'description': 'Session cookie set without using the Secure flag or set over HTTP', 'modifier': -40, }, - # Cross-origin resource sharing 'cross-origin-resource-sharing-not-implemented': { 'description': 'Content is not visible via cross-origin resource sharing (CORS) files or headers', 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-public-access': { - 'description': ('Public content is visible via cross-origin resource sharing (CORS) ' - 'Access-Control-Allow-Origin header'), + 'description': ( + 'Public content is visible via cross-origin resource sharing (CORS) ' 'Access-Control-Allow-Origin header' + ), 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-restricted-access': { - 'description': ('Content is visible via cross-origin resource sharing (CORS) files or headers, ' - 'but is restricted to specific domains'), + 'description': ( + 'Content is visible via cross-origin resource sharing (CORS) files or headers, ' + 'but is restricted to specific domains' + ), 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-universal-access': { 'description': 'Content is visible via cross-origin resource sharing (CORS) file or headers', 'modifier': -50, }, - # Public Key Pinning 'hpkp-preloaded': { 'description': 'Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', @@ -192,15 +193,15 @@ 'modifier': 0, }, 'hpkp-invalid-cert': { - 'description': ('HTTP Public Key Pinning (HPKP) header cannot be set, ' - 'as site contains an invalid certificate chain'), + 'description': ( + 'HTTP Public Key Pinning (HPKP) header cannot be set, ' 'as site contains an invalid certificate chain' + ), 'modifier': 0, }, 'hpkp-header-invalid': { 'description': 'HTTP Public Key Pinning (HPKP) header cannot be recognized', 'modifier': -5, }, - # Redirection 'redirection-all-redirects-preloaded': { 'description': 'All hosts redirected to are in the HTTP Strict Transport Security (HSTS) preload list', @@ -234,11 +235,12 @@ 'description': 'Invalid certificate chain encountered during redirection', 'modifier': -20, }, - # Referrer Policy 'referrer-policy-private': { - 'description': ('Referrer-Policy header set to "no-referrer", "same-origin", "strict-origin" or ' - '"strict-origin-when-cross-origin"'), + 'description': ( + 'Referrer-Policy header set to "no-referrer", "same-origin", "strict-origin" or ' + '"strict-origin-when-cross-origin"' + ), 'modifier': 5, }, 'referrer-policy-no-referrer-when-downgrade': { @@ -257,7 
+259,6 @@ 'description': 'Referrer-Policy header cannot be recognized', 'modifier': -5, }, - # Strict Transport Security (HSTS) 'hsts-preloaded': { 'description': 'Preloaded via the HTTP Strict Transport Security (HSTS) preloading process', @@ -284,11 +285,12 @@ 'modifier': -20, }, 'hsts-invalid-cert': { - 'description': ('HTTP Strict Transport Security (HSTS) header cannot be set, ' - 'as site contains an invalid certificate chain'), + 'description': ( + 'HTTP Strict Transport Security (HSTS) header cannot be set, ' + 'as site contains an invalid certificate chain' + ), 'modifier': -20, }, - # Subresource Integrity (SRI) 'sri-implemented-and-all-scripts-loaded-securely': { 'description': 'Subresource Integrity (SRI) is implemented and all scripts are loaded from a similar origin', @@ -315,16 +317,19 @@ 'modifier': -5, }, 'sri-implemented-but-external-scripts-not-loaded-securely': { - 'description': ('Subresource Integrity (SRI) implemented, but external scripts are loaded over HTTP or use ' - 'protocol-relative URLs via src="//..."'), + 'description': ( + 'Subresource Integrity (SRI) implemented, but external scripts are loaded over HTTP or use ' + 'protocol-relative URLs via src="//..."' + ), 'modifier': -20, }, 'sri-not-implemented-and-external-scripts-not-loaded-securely': { - 'description': ('Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or ' - 'use protocol-relative URLs via src="//..."'), + 'description': ( + 'Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or ' + 'use protocol-relative URLs via src="//..."' + ), 'modifier': -50, }, - # X-Content-Type-Options 'x-content-type-options-nosniff': { 'description': 'X-Content-Type-Options header set to "nosniff"', @@ -338,7 +343,6 @@ 'description': 'X-Content-Type-Options header cannot be recognized', 'modifier': -5, }, - # X-Frame-Options 'x-frame-options-implemented-via-csp': { 'description': 'X-Frame-Options (XFO) implemented via the CSP frame-ancestors directive', @@ -360,7 +364,6 @@ 'description': 'X-Frame-Options (XFO) header cannot be recognized', 'modifier': -20, }, - # X-XSS-Protection 'x-xss-protection-enabled-mode-block': { 'description': 'X-XSS-Protection header set to "1; mode=block"', @@ -386,7 +389,6 @@ 'description': 'X-XSS-Protection header cannot be recognized', 'modifier': -10, }, - # Generic results 'html-not-parsable': { 'description': 'Claims to be html, but cannot be parsed', @@ -399,7 +401,7 @@ 'xml-not-parsable': { 'description': 'Claims to be xml, but cannot be parsed', 'modifier': -20, # can't run an ACAO check if the xml files can't be parsed - } + }, } diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index d10ea07c..36485fac 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -1,77 +1,4 @@ -import httpobs.conf +from httpobs.scanner import scan -from httpobs.scanner.analyzer import NUM_TESTS, tests -from httpobs.scanner.grader import get_grade_and_likelihood_for_score, get_score_description -from httpobs.scanner.retriever import retrieve_all - - -def scan(hostname, **kwargs): - """Performs an Observatory scan, but doesn't require any database/redis - backing. Given the lowered security concerns due to not being a public - API, you can use this to scan arbitrary ports and paths. - - Args: - hostname (str): domain name for host to be scanned. Must not include - protocol (http://, https://) or port number (:80). 
- - Kwargs: - http_port (int): port to scan for HTTP, instead of 80 - https_port (int): port to be scanned for HTTPS, instead of 443 - path (str): path to scan, instead of "/" - verify (bool): whether to enable or disable certificate verification, - enabled by default. This can allow tested sites to pass the HSTS - and HPKP tests, even with self-signed certificates. - - cookies (dict): Cookies sent to the system being scanned. Matches the - requests cookie dict. - headers (dict): HTTP headers sent to the system being scanned. Format - matches the requests headers dict. - - Returns: - A dict representing the analyze (scan) and getScanResults (test) API call. Example: - - { - 'scan': { - 'grade': 'A' - ... - }, - 'test': { - 'content-security-policy': { - 'pass': True - ... - } - } - } - """ - # Always allow localhost scans when run in this way - httpobs.conf.SCANNER_ALLOW_LOCALHOST = True - - # Attempt to retrieve all the resources, not capturing exceptions - reqs = retrieve_all(hostname, **kwargs) - - # If we can't connect at all, let's abort the test - if reqs['responses']['auto'] is None: - return {'error': 'site down'} - - # Get all the results - results = [test(reqs) for test in tests] - for result in results: - result['score_description'] = get_score_description(result['result']) - - # Get the score, grade, etc. - grades = get_grade_and_likelihood_for_score(100 + sum([result.get('score_modifier', 0) for result in results])) - tests_passed = sum([1 if result.get('pass') else 0 for result in results]) - - # Return the results - return({ - 'scan': { - 'grade': grades[1], - 'likelihood_indicator': grades[2], - 'response_headers': dict(reqs['responses']['auto'].headers), - 'score': grades[0], - 'tests_failed': NUM_TESTS - tests_passed, - 'tests_passed': tests_passed, - 'tests_quantity': NUM_TESTS, - }, - 'tests': {result.pop('name'): result for result in results} - }) +# for backwards compatibility, so consumers can continue to use httpobs.scanner.local.scan +__all__ = ["scan"] diff --git a/httpobs/scanner/main.py b/httpobs/scanner/main.py deleted file mode 100644 index 81fea97a..00000000 --- a/httpobs/scanner/main.py +++ /dev/null @@ -1,189 +0,0 @@ -from random import randrange -from time import sleep -from urllib.parse import parse_qs, urlparse - -from httpobs.conf import (BROKER_URL, - SCANNER_ALLOW_KICKSTART, - SCANNER_ALLOW_KICKSTART_NUM_ABORTED, - SCANNER_BROKER_RECONNECTION_SLEEP_TIME, - SCANNER_CYCLE_SLEEP_TIME, - SCANNER_DATABASE_RECONNECTION_SLEEP_TIME, - SCANNER_MAINTENANCE_CYCLE_FREQUENCY, - SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY, - SCANNER_MAX_CPU_UTILIZATION, - SCANNER_MAX_LOAD) -from httpobs.database import (periodic_maintenance, - refresh_materialized_views, - update_scans_dequeue_scans) -from httpobs.scanner.tasks import scan - -import datetime -import psutil -import redis -import subprocess -import sys - - -def main(): - # Start each scanner at a random point in the range to spread out database maintenance - dequeue_loop_count = randrange(0, SCANNER_MAINTENANCE_CYCLE_FREQUENCY) - materialized_view_loop_count = randrange(0, SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY) - - # Parse the BROKER_URL - broker_url = urlparse(BROKER_URL) - - if broker_url.scheme.lower() not in ('redis', 'redis+socket'): # Currently the de-queuer only support redis - print('Sorry, the scanner currently only supports redis.', file=sys.stderr) - sys.exit(1) - - # Get the current CPU utilization and wait a second to begin the loop for the next reading - psutil.cpu_percent() - sleep(1) - - while 
True: - try: - # TODO: Document this madness and magic numbers, make it configurable - # If max cpu is 90 and current CPU is 50, that gives us a headroom of 8 scans - headroom = int((SCANNER_MAX_CPU_UTILIZATION - psutil.cpu_percent()) / 5) - dequeue_quantity = min(headroom, SCANNER_MAX_LOAD) - - if headroom <= 0: - # If the cycle sleep time is .5, sleep 2 seconds at a minimum, 10 seconds at a maximum - sleep_time = min(max(abs(headroom), SCANNER_CYCLE_SLEEP_TIME * 4), 10) - print('[{time}] WARNING: Load too high. Sleeping for {num} second(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=sleep_time), - file=sys.stderr) - - sleep(sleep_time) - continue - - except: - # I've noticed that on laptops that Docker has a tendency to kill the scanner when the laptop sleeps; this - # is designed to catch that exception - sleep(1) - continue - - # Every so many scans, let's opportunistically clear out any PENDING scans that are older than 1800 seconds - # Also update the grade_distribution table - # If it fails, we don't care. Of course, nobody reads the comments, so I should say that *I* don't care. - try: - if dequeue_loop_count % SCANNER_MAINTENANCE_CYCLE_FREQUENCY == 0: - print('[{time}] INFO: Performing periodic maintenance.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - - dequeue_loop_count = 0 - num = periodic_maintenance() - - if num > 0: - print('[{time}] INFO: Cleared {num} broken scan(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=num), - file=sys.stderr) - - # Forcibly restart if things are going real bad, sleep for a bit to avoid flagging - if num > SCANNER_ALLOW_KICKSTART_NUM_ABORTED and SCANNER_ALLOW_KICKSTART: - sleep(10) - try: - print('[{time}] ERROR: Celery appears to be hung. Attempting to kickstart the scanners.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - subprocess.call(['pkill', '-u', 'httpobs']) - except FileNotFoundError: - print('[{time}] ERROR: Tried to kickstart, but no pkill found.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - except: - print('[{time}] ERROR: Tried to kickstart, but failed for unknown reasons.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - except: - pass - finally: - dequeue_loop_count += 1 - num = 0 - - # Every so often we need to refresh the materialized views that the statistics depend on - try: - if materialized_view_loop_count % SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY == 0: - print('[{time}] INFO: Refreshing materialized views.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - - materialized_view_loop_count = 0 - refresh_materialized_views() - - print('[{time}] INFO: Materialized views refreshed.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - except: - pass - finally: - materialized_view_loop_count += 1 - - # Verify that the broker is still up; if it's down, let's sleep and try again later - try: - if broker_url.scheme.lower() == 'redis': - conn = redis.Connection(host=broker_url.hostname, - port=broker_url.port or 6379, - db=int(broker_url.path[1:] if len(broker_url.path) > 0 else 0), - password=broker_url.password) - else: - conn = redis.UnixDomainSocketConnection(path=broker_url.path, - db=int(parse_qs(broker_url.query).get( - 'virtual_host', ['0']) - [0])) - - conn.connect() - conn.can_read() - conn.disconnect() - del conn - except: - print('[{time}] ERROR: Unable to connect to to redis. 
Sleeping for {num} seconds.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=SCANNER_BROKER_RECONNECTION_SLEEP_TIME), - file=sys.stderr - ) - sleep(SCANNER_BROKER_RECONNECTION_SLEEP_TIME) - continue - - # Get a list of sites that are pending - try: - sites_to_scan = update_scans_dequeue_scans(dequeue_quantity) - except IOError: - print('[{time}] ERROR: Unable to retrieve lists of sites to scan. Sleeping for {num} seconds.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=SCANNER_DATABASE_RECONNECTION_SLEEP_TIME), - file=sys.stderr - ) - sleep(SCANNER_DATABASE_RECONNECTION_SLEEP_TIME) - continue - - try: - if sites_to_scan: - print('[{time}] INFO: Dequeuing {num} site(s): {sites}.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=len(sites_to_scan), - sites=', '.join([site[0] for site in sites_to_scan])), - file=sys.stderr - ) - - for site in sites_to_scan: - scan.delay(*site) - - # Always sleep at least some amount of time so that CPU utilization measurements can track - sleep(SCANNER_CYCLE_SLEEP_TIME / 2) - else: # If the queue was empty, lets sleep a little bit - sleep(SCANNER_CYCLE_SLEEP_TIME) - except KeyboardInterrupt: - print('Exiting scanner backend') - sys.exit(1) - except: # this shouldn't trigger, but we don't want a scan breakage to kill the scanner - print('[{time}] ERROR: Unknown celery error.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) - - -if __name__ == '__main__': - main() diff --git a/httpobs/scanner/requirements.txt b/httpobs/scanner/requirements.txt deleted file mode 100644 index a27faf57..00000000 --- a/httpobs/scanner/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -psutil==5.9.0 -publicsuffixlist==0.7.12 -requests==2.27.1 diff --git a/httpobs/scanner/retriever/retriever.py b/httpobs/scanner/retriever/retriever.py index 8bbec210..c8e9b131 100644 --- a/httpobs/scanner/retriever/retriever.py +++ b/httpobs/scanner/retriever/retriever.py @@ -1,20 +1,16 @@ -from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded +import logging from urllib.parse import urlparse -from httpobs.conf import (RETRIEVER_CONNECT_TIMEOUT, - RETRIEVER_CORS_ORIGIN, - RETRIEVER_READ_TIMEOUT, - RETRIEVER_USER_AGENT) -from httpobs.scanner.utils import parse_http_equiv_headers - -import logging import requests - # Disable the requests InsecureRequestWarning -- we will track certificate errors manually when # verification is disabled. 
Also disable requests errors at levels lower than CRITICAL, see: # https://github.com/celery/celery/issues/3633 for crashy details from requests.packages.urllib3.exceptions import InsecureRequestWarning + +from httpobs.conf import RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_CORS_ORIGIN, RETRIEVER_READ_TIMEOUT, RETRIEVER_USER_AGENT +from httpobs.scanner.utils import parse_http_equiv_headers + requests.packages.urllib3.disable_warnings(InsecureRequestWarning) logging.getLogger('requests').setLevel(logging.CRITICAL) @@ -52,18 +48,17 @@ def __create_session(url: str, **kwargs) -> dict: # Override the User-Agent; some sites (like twitter) don't send the CSP header unless you have a modern # user agent - s.headers.update({ - 'User-Agent': RETRIEVER_USER_AGENT, - }) + s.headers.update( + { + 'User-Agent': RETRIEVER_USER_AGENT, + } + ) try: r = s.get(url, timeout=TIMEOUT) # No tls errors r.verified = True - # Let celery exceptions percolate upward - except (SoftTimeLimitExceeded, TimeLimitExceeded): - raise # We can try again if there's an SSL error, making sure to note it in the session except requests.exceptions.SSLError: try: @@ -98,13 +93,12 @@ def __get(session, relative_path='/', headers=None, cookies=None): # TODO: limit the maximum size of the response, to keep malicious site operators from killing us # TODO: Perhaps we can naively do it for now by simply setting a timeout? # TODO: catch TLS errors instead of just setting it to None? - return session.get(session.url.scheme + '://' + session.url.netloc + relative_path, - headers=headers, - cookies=cookies, - timeout=TIMEOUT) - # Let celery exceptions percolate upward - except (SoftTimeLimitExceeded, TimeLimitExceeded): - raise + return session.get( + session.url.scheme + '://' + session.url.netloc + relative_path, + headers=headers, + cookies=cookies, + timeout=TIMEOUT, + ) except (KeyboardInterrupt, SystemExit): raise except: @@ -126,8 +120,8 @@ def __get_page_text(response: requests.Response, force: bool = False) -> str: def retrieve_all(hostname, **kwargs): - kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database - kwargs['headers'] = kwargs.get('headers', {}) # HTTP headers to send, instead of from the database + kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database + kwargs['headers'] = kwargs.get('headers', {}) # HTTP headers to send, instead of from the database # This way of doing it keeps the urls tidy even if makes the code ugly kwargs['http_port'] = ':' + str(kwargs.get('http_port', '')) if 'http_port' in kwargs else '' @@ -137,8 +131,7 @@ def retrieve_all(hostname, **kwargs): retrievals = { 'hostname': hostname, - 'resources': { - }, + 'resources': {}, 'responses': { 'auto': None, # whichever of 'http' or 'https' actually works, with 'https' as higher priority 'cors': None, # CORS preflight test @@ -149,12 +142,7 @@ def retrieve_all(hostname, **kwargs): } # The list of resources to get - resources = ( - '/clientaccesspolicy.xml', - '/contribute.json', - '/crossdomain.xml', - '/robots.txt' - ) + resources = ('/clientaccesspolicy.xml', '/contribute.json', '/crossdomain.xml', '/robots.txt') # Create some reusable sessions, one for HTTP and one for HTTPS http_session = __create_session('http://' + hostname + kwargs['http_port'] + kwargs['path'], **kwargs) @@ -180,9 +168,9 @@ def retrieve_all(hostname, **kwargs): retrievals['resources']['__path__'] = __get_page_text(retrievals['responses']['auto'], force=True) # Do a CORS preflight request - 
retrievals['responses']['cors'] = __get(retrievals['session'], - kwargs['path'], - headers={'Origin': RETRIEVER_CORS_ORIGIN}) + retrievals['responses']['cors'] = __get( + retrievals['session'], kwargs['path'], headers={'Origin': RETRIEVER_CORS_ORIGIN} + ) # Store all the files we retrieve for resource in resources: @@ -190,8 +178,10 @@ def retrieve_all(hostname, **kwargs): retrievals['resources'][resource] = __get_page_text(resp) # Parse out the HTTP meta-equiv headers - if (retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES and - retrievals['resources']['__path__']): + if ( + retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES + and retrievals['resources']['__path__'] + ): retrievals['responses']['auto'].http_equiv = parse_http_equiv_headers(retrievals['resources']['__path__']) else: retrievals['responses']['auto'].http_equiv = {} diff --git a/httpobs/scanner/scanner.py b/httpobs/scanner/scanner.py new file mode 100644 index 00000000..ba0f4984 --- /dev/null +++ b/httpobs/scanner/scanner.py @@ -0,0 +1,99 @@ +from httpobs.scanner.analyzer import NUM_TESTS, tests +from httpobs.scanner.grader import ( + MINIMUM_SCORE_FOR_EXTRA_CREDIT, + get_grade_and_likelihood_for_score, + get_score_description, +) +from httpobs.scanner.retriever import retrieve_all +from httpobs.scanner.utils import sanitize_headers + +# Current algorithm version +ALGORITHM_VERSION = 2 + + +def scan(hostname: str, **kwargs): + """Performs an Observatory scan. + + Args: + hostname (str): domain name for host to be scanned. Must not include + protocol (http://, https://) or port number (:80). + + Kwargs: + http_port (int): port to scan for HTTP, instead of 80 + https_port (int): port to be scanned for HTTPS, instead of 443 + path (str): path to scan, instead of "/" + verify (bool): whether to enable or disable certificate verification, + enabled by default. This can allow tested sites to pass the HSTS + and HPKP tests, even with self-signed certificates. + + cookies (dict): Cookies sent to the system being scanned. Matches the + requests cookie dict. + headers (dict): HTTP headers sent to the system being scanned. Format + matches the requests headers dict. + + Returns: + A dict representing the analyze (scan) and getScanResults (test) API call. Example: + + { + 'scan': { + 'grade': 'A' + ... + }, + 'test': { + 'content-security-policy': { + 'pass': True + ... 
+            }
+        }
+    }
+    """
+
+    # Attempt to retrieve all the resources
+    reqs = retrieve_all(hostname, **kwargs)
+
+    # If we can't connect at all, let's abort the test
+    if reqs['responses']['auto'] is None:
+        return {'error': 'site down'}
+
+    results = [test(reqs) for test in tests]
+    response_headers = sanitize_headers(reqs["responses"]["auto"].headers)
+    status_code = reqs["responses"]["auto"].status_code
+
+    tests_passed = 0
+    score_with_extra_credit = uncurved_score = 100
+
+    for result in results:
+        result["score_description"] = get_score_description(result['result'])
+
+        passed = result.get("pass")
+        score_modifier = result.get("score_modifier")
+
+        # Keep track of how many tests passed or failed
+        if passed:
+            tests_passed += 1
+
+        # And keep track of the score
+        score_with_extra_credit += score_modifier
+        if score_modifier < 0:
+            uncurved_score += score_modifier
+
+    # Only record the full score if the uncurved score already receives an A
+    score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score
+
+    # Convert the final score into a grade and likelihood indicator
+    score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score)
+
+    return {
+        "scan": {
+            "algorithm_version": ALGORITHM_VERSION,
+            "grade": grade,
+            "likelihood_indicator": likelihood_indicator,
+            "response_headers": response_headers,
+            "score": score,
+            "tests_failed": NUM_TESTS - tests_passed,
+            "tests_passed": tests_passed,
+            "tests_quantity": NUM_TESTS,
+            "status_code": status_code,
+        },
+        "tests": {result.pop("name"): result for result in results},
+    }
diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py
deleted file mode 100644
index 03c84c04..00000000
--- a/httpobs/scanner/tasks.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from celery import Celery
-from celery.exceptions import (
-    SoftTimeLimitExceeded,
-    TimeLimitExceeded,
-    WorkerLostError,
-    WorkerShutdown,
-    WorkerTerminate)
-
-from httpobs.conf import DEVELOPMENT_MODE
-from httpobs.database import (insert_test_results,
-                              select_site_headers,
-                              update_scan_state)
-from httpobs.scanner import celeryconfig, STATE_ABORTED, STATE_FAILED, STATE_RUNNING
-from httpobs.scanner.analyzer import tests
-from httpobs.scanner.retriever import retrieve_all
-from httpobs.scanner.utils import sanitize_headers
-
-import sys
-
-
-# Create the scanner task queue
-scanner = Celery()
-scanner.config_from_object(celeryconfig)
-
-
-@scanner.task()
-def scan(hostname: str, site_id: int, scan_id: int):
-    try:
-        # Once celery kicks off the task, let's update the scan state from PENDING to RUNNING
-        update_scan_state(scan_id, STATE_RUNNING)
-
-        # Get the site's cookies and headers
-        headers = select_site_headers(hostname)
-
-        # Attempt to retrieve all the resources
-        reqs = retrieve_all(hostname, cookies=headers['cookies'], headers=headers['headers'])
-
-        # If we can't connect at all, let's abort the test
-        if reqs['responses']['auto'] is None:
-            update_scan_state(scan_id, STATE_FAILED, error='site down')
-
-            return
-
-        # Execute each test, replacing the underscores in the function name with dashes in the test name
-        # TODO: Get overridden expectations
-        insert_test_results(site_id,
-                            scan_id,
-                            [test(reqs) for test in tests],
-                            sanitize_headers(reqs['responses']['auto'].headers),
-                            reqs['responses']['auto'].status_code)
-
-    # catch the celery timeout, which will almost certainly occur in retrieve_all()
-    except SoftTimeLimitExceeded:
-        update_scan_state(scan_id, STATE_ABORTED, error='site unresponsive')
-    except (TimeLimitExceeded, WorkerLostError,
WorkerShutdown, WorkerTerminate): - raise - # the database is down, oh no! - except IOError: - print('database down, aborting scan on {hostname}'.format(hostname=hostname), file=sys.stderr) - except: - # TODO: have more specific error messages - e = sys.exc_info()[1] # get the error message - - # If we are unsuccessful, close out the scan in the database - update_scan_state(scan_id, STATE_FAILED, error=repr(e)) - - # Print the exception to stderr if we're in dev - if DEVELOPMENT_MODE: - import traceback - print('Error detected in scan for : ' + hostname) - traceback.print_exc(file=sys.stderr) diff --git a/httpobs/scanner/utils.py b/httpobs/scanner/utils.py index c3eb4fb2..771c20ae 100644 --- a/httpobs/scanner/utils.py +++ b/httpobs/scanner/utils.py @@ -1,22 +1,22 @@ import json import os.path -import requests -import socket import sys +import requests from bs4 import BeautifulSoup as bs -from httpobs.conf import (SCANNER_ALLOW_LOCALHOST, - SCANNER_PINNED_DOMAINS) from requests.structures import CaseInsensitiveDict +from httpobs.conf import SCANNER_PINNED_DOMAINS HSTS_URL = 'https://raw.githubusercontent.com/chromium/chromium/main/net/http/transport_security_state_static.json' def parse_http_equiv_headers(html: str) -> CaseInsensitiveDict: - http_equiv_headers = CaseInsensitiveDict({ - 'Content-Security-Policy': [], - }) + http_equiv_headers = CaseInsensitiveDict( + { + 'Content-Security-Policy': [], + } + ) # Try to parse the HTML try: @@ -54,13 +54,16 @@ def retrieve_store_hsts_preload_list(): r = json.loads(r) # Mapping of site -> whether it includes subdomains - hsts = {site['name']: { - 'includeSubDomains': site.get('include_subdomains', False), - 'includeSubDomainsForPinning': - site.get('include_subdomains', False) or site.get('include_subdomains_for_pinning', False), - 'mode': site.get('mode'), - 'pinned': True if 'pins' in site else False, - } for site in r['entries']} + hsts = { + site['name']: { + 'includeSubDomains': site.get('include_subdomains', False), + 'includeSubDomainsForPinning': site.get('include_subdomains', False) + or site.get('include_subdomains_for_pinning', False), + 'mode': site.get('mode'), + 'pinned': True if 'pins' in site else False, + } + for site in r['entries'] + } # Add in the manually pinned domains for pinned_domain in SCANNER_PINNED_DOMAINS: @@ -68,7 +71,7 @@ def retrieve_store_hsts_preload_list(): 'includeSubDomains': True, 'includeSubDomainsForPinning': True, 'mode': 'force-https', - 'pinned': True + 'pinned': True, } # Write json file to disk @@ -97,46 +100,6 @@ def sanitize_headers(headers: dict) -> dict: return None -def valid_hostname(hostname: str): - """ - :param hostname: The hostname requested in the scan - :return: Hostname if it's valid, None if it's an IP address, otherwise False - """ - - # Block attempts to scan things like 'localhost' if not allowed - if ('.' 
not in hostname or 'localhost' in hostname) and not SCANNER_ALLOW_LOCALHOST:
-        return False
-
-    # First, let's try to see if it's an IPv4 address
-    try:
-        socket.inet_aton(hostname)  # inet_aton() will throw an exception if hostname is not a valid IP address
-        return None  # If we get this far, it's an IP address and therefore not a valid fqdn
-    except:
-        pass
-
-    # And IPv6
-    try:
-        socket.inet_pton(socket.AF_INET6, hostname)  # same as inet_aton(), but for IPv6
-        return None
-    except:
-        pass
-
-    # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
-    # that the validator is making a network connection -- the same that requests would make.
-    try:
-        hostname_ips = socket.getaddrinfo(hostname, 443)
-
-        # This shouldn't trigger, since getaddrinfo should generate gaierror if there's no A records. Nevertheless,
-        # I want to be careful in case of edge cases. This does make it hard to test.
-        if len(hostname_ips) < 1:
-            return False
-    except:
-        return False
-
-    # If we've made it this far, then everything is good to go! Woohoo!
-    return hostname
-
-
# allow for this file to be run directly to fetch the HSTS preload list via the debugger
# or via the regen script
if __name__ == "__main__":
diff --git a/httpobs/scripts/httpobs-local-scan b/httpobs/scripts/httpobs-local-scan
deleted file mode 100755
index a7163ea3..00000000
--- a/httpobs/scripts/httpobs-local-scan
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env python3
-
-import httpobs.scanner.local
-
-import argparse
-import json
-
-from operator import itemgetter
-from urllib.parse import urlparse
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()
-
-    # Add the various arguments
-    parser.add_argument('--http-port',
-                        default=80,
-                        help='port to use for the HTTP scan (instead of 80)',
-                        type=int)
-    parser.add_argument('--https-port',
-                        default=443,
-                        help='port to use for the HTTPS scan (instead of 443)',
-                        type=int)
-    parser.add_argument('--path',
-                        default=argparse.SUPPRESS,
-                        help='path to scan, instead of /',
-                        type=str)
-    parser.add_argument('--no-verify',
-                        action='store_true',
-                        help='disable certificate verification in the HSTS/HPKP tests')
-    parser.add_argument('--cookies',
-                        default=argparse.SUPPRESS,
-                        help='cookies to send in scan (json formatted)',
-                        type=json.loads)
-    parser.add_argument('--headers',
-                        default=argparse.SUPPRESS,
-                        help='headers to send in scan (json formatted)',
-                        type=json.loads)
-    parser.add_argument('--format',
-                        default='json',
-                        help='output format (json or report), default of json',
-                        type=str)
-    parser.add_argument('hostname',
-                        help='host to scan (hostname only, no protocol or port)',
-                        type=str)
-
-    args = vars(parser.parse_args())
-
-    # Remove the -- from the name, change - to underscore
-    args = {k.split('--')[-1].replace('-', '_'): v for k, v in args.items()}
-    output_format = args.pop('format').lower()
-
-    # print out help if no arguments are specified, or bad arguments
-    if len(args) == 0 or output_format not in ('json', 'report'):
-        parser.print_help()
-        parser.exit(-1)
-
-    # port can't be appended to hostname because we need both HTTP and HTTPS ports.
-    # protocol can't be prefixed either, as we scan both of those ports.
-    #
-    # use urlparse to ensure that neither of these are present in the hostname.
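The urlparse check described in the comment above is carried over verbatim into the new scan.py below; in isolation it behaves like this (the helper name is hypothetical, as both scripts inline the expression):

```python
from urllib.parse import urlparse


def is_bare_hostname(hostname: str) -> bool:
    """A bare hostname parses with no scheme, and prefixing 'http://'
    to it must not reveal an embedded port."""
    return not (urlparse(hostname).scheme or urlparse('http://' + hostname).port)


assert is_bare_hostname('mozilla.org')
assert not is_bare_hostname('https://mozilla.org')  # scheme smuggled in
assert not is_bare_hostname('mozilla.org:8443')     # port smuggled in
```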
- if urlparse(args['hostname']).scheme or urlparse('http://' + args['hostname']).port: - parser.print_help() - parser.exit(-1) - - # Because it makes sense this way - if args['http_port'] == 80: - del(args['http_port']) - - if args['https_port'] == 443: - del (args['https_port']) - - if args.pop('no_verify'): - args['verify'] = False - - # Get the scan results - r = httpobs.scanner.local.scan(**args) - - # print out the results to the command line - if output_format == 'json': - print(json.dumps(r, indent=4, sort_keys=True)) - elif output_format == 'report': - print('Score: {0} [{1}]'.format(r['scan']['score'], - r['scan']['grade'])) - - print('Modifiers:') - - # Get all the scores by test name - scores = [[k.replace('-', ' ').title(), v['score_modifier'], v['score_description']] - for k, v in r['tests'].items()] - scores = sorted(scores, key=itemgetter(0)) # [('test1', -5, 'foo'), ('test2', -10, 'bar')] - - for score in scores: - if score[1] > 0: - score[1] = '+' + str(score[1]) # display 5 as +5 - print(' {test:<30} [{modifier:>3}] {reason}'.format(test=score[0], - modifier=score[1], - reason=score[2])) diff --git a/httpobs/scripts/httpobs-mass-scan b/httpobs/scripts/httpobs-mass-scan deleted file mode 100755 index 4774b231..00000000 --- a/httpobs/scripts/httpobs-mass-scan +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import print_function - -from httpobs.conf import API_URL - -import grequests -import os -import requests -import sys -import time - -if 'HTTPOBS_DEV' in os.environ: # TODO: use httpobs.conf - MAX_QUEUE = 64 -else: - MAX_QUEUE = 256 - -if __name__ == '__main__': - if len(sys.argv) < 2: - print('Usage: ' + sys.argv[0] + ' ') - sys.exit(1) - - start_time = time.time() - total_scanned = 0 - - s = requests.Session() - - try: - with open(sys.argv[1], 'r') as alexafp: - hosts = [host.strip().split(',')[1] if ',' in host else host.strip() for host in alexafp] - except: - print('Cannot open ' + sys.argv[1]) - sys.exit(1) - - while True: - loop_time = time.time() - - # Get the queue availability - try: - r = s.get(API_URL + '/__stats__?verbose=true').json()['states'] - except: - time.sleep(5) - continue - - available = MAX_QUEUE - r.get('PENDING', 0) - r.get('RUNNING', 0) - r.get('STARTING', 0) - - print('Queue availability: {queue_avail}. Total scanned: {total_scanned}. 
Pending: {pending}.'.format( - queue_avail=available, total_scanned=total_scanned, pending=r.get('PENDING', 0))) - - # Quit if the scanner reports that nothing is pending - if not hosts and r.get('PENDING', 0) == 0: - break - - if available > 0: - targets = hosts[:available] - urls = [API_URL + '/analyze?host=' + host for host in targets] - total_scanned += available - - # Start up a new mass scan - try: - rs = (grequests.post(u) for u in urls) - grequests.map(rs) - except: - time.sleep(5) - raise - - hosts = hosts[available:] - - # If the previous loop completely quickly, cooldown a moment - if time.time() - loop_time < 5: - time.sleep(5) - - total_time = int(time.time() - start_time) - print('Elapsed time: {elapsed_time}s'.format(elapsed_time=total_time)) - print('Scans/sec: {speed}'.format(speed=total_scanned / total_time)) diff --git a/httpobs/scripts/httpobs-scan-worker b/httpobs/scripts/httpobs-scan-worker deleted file mode 100755 index f5d39040..00000000 --- a/httpobs/scripts/httpobs-scan-worker +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env sh - -if [ $(uname -s) = "Darwin" ]; then - ulimit -n 2048 # increase file descriptor limits on OS X -fi - -if [ -n "$HTTPOBS_MAX_CONCURRENCY" ]; then - CONCURRENCY=$HTTPOBS_MAX_CONCURRENCY - LOGLEVEL=warning -elif [ -n "$HTTPOBS_DEV" ]; then - CONCURRENCY=48 - LOGLEVEL=info -else - CONCURRENCY=96 - LOGLEVEL=warning -fi - -# Kill the existing celery workers -PID='/var/run/httpobs/scanner.pid' -if [ -f $PID ]; -then - kill `cat $PID` - rm -f $PID -fi - -# Execute celery -celery \ - -A httpobs.scanner.tasks \ - --autoscale=$CONCURRENCY,4 \ - --broker=$HTTPOBS_BROKER_URL \ - --detach \ - --hostname='scanner@%h' \ - --logfile='/var/log/httpobs/scanner.log' \ - --loglevel=$LOGLEVEL \ - --maxtasksperchild=16 \ - --pidfile='/var/run/httpobs/scanner.pid' \ -worker - -# Run the scanner -python3 -u httpobs/scanner/main.py >> /var/log/httpobs/scan-worker.log 2>&1 diff --git a/httpobs/scripts/scan.py b/httpobs/scripts/scan.py new file mode 100755 index 00000000..7ce0011d --- /dev/null +++ b/httpobs/scripts/scan.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python3 + +import argparse +import json +from operator import itemgetter +from urllib.parse import urlparse + +import httpobs.scanner + + +def main(): + parser = argparse.ArgumentParser() + + # Add the various arguments + parser.add_argument('--http-port', default=80, help='port to use for the HTTP scan (instead of 80)', type=int) + parser.add_argument('--https-port', default=443, help='port to use for the HTTPS scan (instead of 443)', type=int) + parser.add_argument('--path', default=argparse.SUPPRESS, help='path to scan, instead of /', type=str) + parser.add_argument( + '--no-verify', action='store_true', help='disable certificate verification in the HSTS/HPKP tests' + ) + parser.add_argument( + '--cookies', default=argparse.SUPPRESS, help='cookies to send in scan (json formatted)', type=json.loads + ) + parser.add_argument( + '--headers', default=argparse.SUPPRESS, help='headers to send in scan (json formatted)', type=json.loads + ) + parser.add_argument('--format', default='json', help='output format (json or report), default of json', type=str) + parser.add_argument('hostname', help='host to scan (hostname only, no protocol or port)', type=str) + + args = vars(parser.parse_args()) + + # Remove the -- from the name, change - to underscore + args = {k.split('--')[-1].replace('-', '_'): v for k, v in args.items()} + output_format = args.pop('format').lower() + + # print out help if no arguments are specified, or 
bad arguments + if len(args) == 0 or output_format not in ('json', 'report'): + parser.print_help() + parser.exit(-1) + + # port can't be appended to hostname because we need both HTTP and HTTPS ports. + # protocol can't be prefixed either, as we scan both of those ports. + # + # use urlparse to ensure that neither of these are present in the hostname. + if urlparse(args['hostname']).scheme or urlparse('http://' + args['hostname']).port: + parser.print_help() + parser.exit(-1) + + # Because it makes sense this way + if args['http_port'] == 80: + del args['http_port'] + + if args['https_port'] == 443: + del args['https_port'] + + if args.pop('no_verify'): + args['verify'] = False + + # Get the scan results + r = httpobs.scanner.scan(**args) + + # print out the results to the command line + if output_format == 'json': + print(json.dumps(r, indent=4, sort_keys=True)) + elif output_format == 'report': + print('Score: {0} [{1}]'.format(r['scan']['score'], r['scan']['grade'])) + + print('Modifiers:') + + # Get all the scores by test name + scores = [ + [k.replace('-', ' ').title(), v['score_modifier'], v['score_description']] for k, v in r['tests'].items() + ] + scores = sorted(scores, key=itemgetter(0)) # [('test1', -5, 'foo'), ('test2', -10, 'bar')] + + for score in scores: + if score[1] > 0: + score[1] = '+' + str(score[1]) # display 5 as +5 + print(' {test:<30} [{modifier:>3}] {reason}'.format(test=score[0], modifier=score[1], reason=score[2])) + + +if __name__ == "__main__": + main() diff --git a/httpobs/tests/unittests/test_content.py b/httpobs/tests/unittests/test_content.py index b0ef5ef6..87377cda 100644 --- a/httpobs/tests/unittests/test_content.py +++ b/httpobs/tests/unittests/test_content.py @@ -41,7 +41,9 @@ def test_contribute_too_large(self): self.assertEquals(result['data'], {}) def test_with_required_keys(self): - self.reqs['resources']['/contribute.json'] = """ + self.reqs['resources'][ + '/contribute.json' + ] = """ { "name": "Bedrock", "description": "The app powering www.mozilla.org.", @@ -86,7 +88,9 @@ def test_with_required_keys(self): self.assertTrue(result['pass']) def test_missing_required_keys(self): - self.reqs['resources']['/contribute.json'] = """ + self.reqs['resources'][ + '/contribute.json' + ] = """ { "name": "Bedrock", "description": "The app powering www.mozilla.org.", @@ -152,7 +156,9 @@ def test_not_html(self): # json, like what an API might return self.reqs['responses']['auto'].headers['Content-Type'] = 'application/json' - self.reqs['resources']['__path__'] = """ + self.reqs['resources'][ + '__path__' + ] = """ { 'foo': 'bar' } diff --git a/httpobs/tests/unittests/test_csp_parser.py b/httpobs/tests/unittests/test_csp_parser.py index 0ca40cf2..6201af95 100644 --- a/httpobs/tests/unittests/test_csp_parser.py +++ b/httpobs/tests/unittests/test_csp_parser.py @@ -8,73 +8,76 @@ def test_csp_parser(self): # one policy with one directive policy = ["default-src 'none'"] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"} - }) + self.assertEquals(parse_csp(policy), {'default-src': {"'none'"}}) # one policy with multiple directives policy = ["default-src 'none'; script-src 'self' https://mozilla.org"] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # two identical policies policy = [ "default-src 'none'; script-src 'self' https://mozilla.org", 
"default-src 'none'; script-src 'self' https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # two policies, one of which has a source that isn't in the other policy = [ "default-src 'none'; script-src 'self' https://mozilla.org", "default-src 'none'; script-src 'self' https://mozilla.org https://example.com", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # same thing as the previous policy, but the sources are in different orders policy = [ "default-src 'none'; script-src 'self' https://mozilla.org", "default-src 'none'; script-src https://example.com 'self' https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # a policy with two differing websites that should end up with 'none' policy = [ "default-src https://mozilla.org", "default-src https://mozilla.com", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with four differing websites that should end up with 'none' policy = [ "default-src https://mozilla.org https://mozilla.net", "default-src https://mozilla.com https://mozilla.io", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with a bunch of websites, with only two in common policy = [ "default-src https://mozilla.org https://mozilla.net https://mozilla.com https://mozilla.io", "default-src https://mozilla.pizza https://mozilla.ninja https://mozilla.net https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"https://mozilla.net", "https://mozilla.org"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"https://mozilla.net", "https://mozilla.org"}, + }, + ) # a four policies with a bunch of websites, with only two in common policy = [ @@ -83,27 +86,36 @@ def test_csp_parser(self): "default-src https://mozilla.net https://mozilla.fox https://mozilla.fire https://mozilla.org", "default-src https://mozilla.browser https://mozilla.web https://mozilla.net https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"https://mozilla.net", "https://mozilla.org"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"https://mozilla.net", "https://mozilla.org"}, + }, + ) # a policy with http: and https:, two differing sources that should end up with 'none' policy = [ "default-src http:", "default-src https:", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with http: and https:, two differing sources that should end up with 'none' policy = [ "default-src http: http:", "default-src https: https:", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { 
+ 'default-src': {"'none'"}, + }, + ) # policies that are too short policies = ( diff --git a/httpobs/tests/unittests/test_environ.py b/httpobs/tests/unittests/test_environ.py deleted file mode 100644 index 17b8ee37..00000000 --- a/httpobs/tests/unittests/test_environ.py +++ /dev/null @@ -1,55 +0,0 @@ -# TODO: Revisit the SystemExit things when we have time -# from os import environ -# from unittest import TestCase -# -# -# class TestEnvironmentalVariables(TestCase): -# def test_no_broker_url(self): -# def __import_scanner_celeryconfig_no_broker_url(): -# import httpobs.scanner.celeryconfig -# if httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS: -# pass -# -# def __import_database_celeryconfig_no_broker_url(): -# import httpobs.scanner.celeryconfig -# if httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS: -# pass -# -# if 'BROKER_URL' in environ: -# BROKER_URL = environ['BROKER_URL'] -# del environ['BROKER_URL'] -# else: -# BROKER_URL = None -# -# self.assertRaises(SystemExit, __import_database_celeryconfig_no_broker_url) -# self.assertRaises(SystemExit, __import_scanner_celeryconfig_no_broker_url) -# -# if BROKER_URL: -# environ['BROKER_URL'] = BROKER_URL -# -# # Mock this -# # def test_broker_url(self): -# # environ['BROKER_URL'] = 'foo' -# # -# # import httpobs.database.celeryconfig -# # import httpobs.scanner.celeryconfig -# # -# # self.assertTrue(httpobs.database.celeryconfig.CELERY_IGNORE_RESULTS) -# # self.assertTrue(httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS) -# -# def test_no_database_url(self): -# def __import_database_no_database_url(): -# import httpobs.database.database -# if httpobs.database.database.conn: -# pass -# -# if 'HTTPOBS_DATABASE_URL' in environ: -# HTTPOBS_DATABASE_URL = environ['HTTPOBS_DATABASE_URL'] -# del environ['HTTPOBS_DATABASE_URL'] -# else: -# HTTPOBS_DATABASE_URL = None -# -# self.assertRaises(SystemExit, __import_database_no_database_url) -# -# if HTTPOBS_DATABASE_URL: -# environ['HTTPOBS_DATABASE_URL'] = HTTPOBS_DATABASE_URL diff --git a/httpobs/tests/unittests/test_grades.py b/httpobs/tests/unittests/test_grades.py index 4d7bc555..74eb4ee7 100644 --- a/httpobs/tests/unittests/test_grades.py +++ b/httpobs/tests/unittests/test_grades.py @@ -5,8 +5,10 @@ class TestGrader(TestCase): def test_get_score_description(self): - self.assertEquals('Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', - get_score_description('hpkp-preloaded')) + self.assertEquals( + 'Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', + get_score_description('hpkp-preloaded'), + ) def test_get_score_modifier(self): self.assertEquals(0, get_score_modifier('hpkp-preloaded')) diff --git a/httpobs/tests/unittests/test_headers.py b/httpobs/tests/unittests/test_headers.py index 6e204727..cb2720ef 100644 --- a/httpobs/tests/unittests/test_headers.py +++ b/httpobs/tests/unittests/test_headers.py @@ -1,14 +1,16 @@ from http.cookiejar import Cookie from unittest import TestCase -from httpobs.scanner.analyzer.headers import (content_security_policy, - cookies, - public_key_pinning, - referrer_policy, - strict_transport_security, - x_content_type_options, - x_frame_options, - x_xss_protection) +from httpobs.scanner.analyzer.headers import ( + content_security_policy, + cookies, + public_key_pinning, + referrer_policy, + strict_transport_security, + x_content_type_options, + x_frame_options, + x_xss_protection, +) from httpobs.tests.utils import empty_requests, set_header @@ -115,8 +117,7 @@ def test_unsafe_inline(self): def test_unsafe_eval(self): 
reqs = empty_requests() - set_header(reqs['responses']['auto'], 'Content-Security-Policy', - "default-src 'none'; script-src 'unsafe-eval'") + set_header(reqs['responses']['auto'], 'Content-Security-Policy', "default-src 'none'; script-src 'unsafe-eval'") result = content_security_policy(reqs) @@ -126,16 +127,18 @@ def test_unsafe_eval(self): self.assertTrue(result['policy']['unsafeEval']) def test_unsafe_inline_in_style_src_only(self): - values = ("object-src 'none'; script-src 'none'; style-src 'unsafe-inline'", - "default-src 'none'; script-src https://mozilla.org; style-src 'unsafe-inline'", - "default-src 'unsafe-inline'; script-src https://mozilla.org", - "default-src 'none';;; ;;;style-src 'self' 'unsafe-inline'", - "default-src 'none'; style-src data:", - "default-src 'none'; style-src *", - "default-src 'none'; style-src https:", - "default-src 'none'; style-src 'unsafe-inline'; " + - "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' " + - "'unsafe-inline'") + values = ( + "object-src 'none'; script-src 'none'; style-src 'unsafe-inline'", + "default-src 'none'; script-src https://mozilla.org; style-src 'unsafe-inline'", + "default-src 'unsafe-inline'; script-src https://mozilla.org", + "default-src 'none';;; ;;;style-src 'self' 'unsafe-inline'", + "default-src 'none'; style-src data:", + "default-src 'none'; style-src *", + "default-src 'none'; style-src https:", + "default-src 'none'; style-src 'unsafe-inline'; " + + "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' " + + "'unsafe-inline'", + ) for value in values: reqs = empty_requests() @@ -150,20 +153,21 @@ def test_unsafe_inline_in_style_src_only(self): def test_no_unsafe(self): # See https://github.com/mozilla/http-observatory/issues/88 and # https://github.com/mozilla/http-observatory/issues/277 for 'unsafe-inline' + hash/nonce - values = ("default-src https://mozilla.org", - "default-src https://mozilla.org;;; ;;;script-src 'none'", - "object-src 'none'; script-src https://mozilla.org; " + - "style-src https://mozilla.org; upgrade-insecure-requests;", - "object-src 'none'; script-src 'strict-dynamic' 'nonce-abc' 'unsafe-inline'; style-src 'none'", - "object-src 'none'; style-src 'self';" + - "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='", - "object-src 'none'; style-src 'self'; script-src 'unsafe-inline' " + - "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='" + - "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB='", - "object-src 'none'; script-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'; style-src 'none'", - "default-src https://mozilla.org; style-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'", - "default-src https://mozilla.org; style-src 'unsafe-inline' " + - "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' 'unsafe-inline'") + values = ( + "default-src https://mozilla.org", + "default-src https://mozilla.org;;; ;;;script-src 'none'", + "object-src 'none'; script-src https://mozilla.org; " + + "style-src https://mozilla.org; upgrade-insecure-requests;", + "object-src 'none'; script-src 'strict-dynamic' 'nonce-abc' 'unsafe-inline'; style-src 'none'", + "object-src 'none'; style-src 'self';" + "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='", + "object-src 'none'; style-src 'self'; script-src 'unsafe-inline' " + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='" + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB='", + "object-src 'none'; script-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'; style-src 'none'", 
+ "default-src https://mozilla.org; style-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'", + "default-src https://mozilla.org; style-src 'unsafe-inline' " + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' 'unsafe-inline'", + ) for value in values: reqs = empty_requests() @@ -175,7 +179,6 @@ def test_no_unsafe(self): self.assertTrue(result['pass']) def test_no_unsafe_default_src_none(self): - # An HTTP header (default-src http:) and HTTP equiv (default-src https:), with differing values # that should end up as default-src 'none' reqs = empty_requests('test_parse_http_equiv_headers_csp2.html') @@ -189,8 +192,8 @@ def test_no_unsafe_default_src_none(self): values = ( "default-src", # no value == 'none' TODO: Fix this "default-src 'none'; script-src 'strict-dynamic' 'nonce-abc123' 'unsafe-inline'", - "default-src 'none'; script-src https://mozilla.org;" + - "style-src https://mozilla.org; upgrade-insecure-requests;", + "default-src 'none'; script-src https://mozilla.org;" + + "style-src https://mozilla.org; upgrade-insecure-requests;", "default-src 'none'; object-src https://mozilla.org", ) @@ -368,82 +371,90 @@ def test_missing(self): def test_secure_with_httponly_sessions(self): # Python cookies are the literal worst, seriously, the worst - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='foo', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) # See: https://github.com/mozilla/http-observatory/issues/121 for the __cfduid insanity - cookie = Cookie(name='__cfduid', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rest={}, - rfc2109=False, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='__cfduid', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rest={}, + rfc2109=False, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) # See: 
https://github.com/mozilla/http-observatory/issues/282 for the heroku-session-affinity insanity - cookie = Cookie(name='heroku-session-affinity', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rest={}, - rfc2109=False, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='heroku-session-affinity', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rest={}, + rfc2109=False, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -453,239 +464,266 @@ def test_secure_with_httponly_sessions(self): self.assertFalse(result['sameSite']) def test_secure_with_httponly_sessions_and_samesite(self): - cookie = Cookie(name='SESSIONID_SAMESITE_STRICT', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Strict'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_STRICT', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Strict'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_LAX', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Lax'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_LAX', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Lax'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_NONE', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'None'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_NONE', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'None'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) 
self.assertEquals('cookies-secure-with-httponly-sessions-and-samesite', result['result']) - self.assertEquals({ - 'SESSIONID_SAMESITE_STRICT': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Strict', - 'secure': True}, - 'SESSIONID_SAMESITE_LAX': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Lax', - 'secure': True}, - 'SESSIONID_SAMESITE_NONE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'None', - 'secure': True} - }, - result['data']) + self.assertEquals( + { + 'SESSIONID_SAMESITE_STRICT': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Strict', + 'secure': True, + }, + 'SESSIONID_SAMESITE_LAX': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Lax', + 'secure': True, + }, + 'SESSIONID_SAMESITE_NONE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'None', + 'secure': True, + }, + }, + result['data'], + ) self.assertTrue(result['pass']) self.assertTrue(result['sameSite']) def test_secure_with_httponly_sessions_and_samesite_not_awarded_if_not_all_cookies_samesite(self): - cookie = Cookie(name='SESSIONID_SAMESITE_STRICT', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Strict'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_STRICT', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Strict'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_NO_SAMESITE', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_NO_SAMESITE', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_LAX', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Lax'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_LAX', + comment=None, + comment_url=None, + 
discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Lax'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_NONE', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'None'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_NONE', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'None'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) self.assertEquals('cookies-secure-with-httponly-sessions', result['result']) - self.assertEquals({ - 'SESSIONID_SAMESITE_STRICT': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Strict', - 'secure': True}, - 'SESSIONID_NO_SAMESITE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': False, - 'secure': True}, - 'SESSIONID_SAMESITE_LAX': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Lax', - 'secure': True}, - 'SESSIONID_SAMESITE_NONE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'None', - 'secure': True} - }, - result['data']) + self.assertEquals( + { + 'SESSIONID_SAMESITE_STRICT': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Strict', + 'secure': True, + }, + 'SESSIONID_NO_SAMESITE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': False, + 'secure': True, + }, + 'SESSIONID_SAMESITE_LAX': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Lax', + 'secure': True, + }, + 'SESSIONID_SAMESITE_NONE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'None', + 'secure': True, + }, + }, + result['data'], + ) self.assertTrue(result['pass']) self.assertFalse(result['sameSite']) def test_anticsrf_without_samesite(self): - cookie = Cookie(name='CSRFTOKEN', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='CSRFTOKEN', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', 
+ port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -695,23 +733,25 @@ def test_anticsrf_without_samesite(self): self.assertFalse(result['sameSite']) def test_samesite_invalid_empty(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': None}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': None}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -721,23 +761,25 @@ def test_samesite_invalid_empty(self): self.assertIsNone(result['sameSite']) def test_samesite_invalid_true(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -747,23 +789,25 @@ def test_samesite_invalid_true(self): self.assertIsNone(result['sameSite']) def test_samesite_invalid(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Invalid'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Invalid'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -773,23 +817,25 @@ def test_samesite_invalid(self): self.assertIsNone(result['sameSite']) def test_regular_cookie_no_secure_but_hsts(self): - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='foo', + 
comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) self.reqs['responses']['https'].headers['Strict-Transport-Security'] = 'max-age=15768000' @@ -800,23 +846,25 @@ def test_regular_cookie_no_secure_but_hsts(self): self.assertFalse(result['sameSite']) def test_session_cookie_no_secure_but_hsts(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) self.reqs['responses']['https'].headers['Strict-Transport-Security'] = 'max-age=15768000' @@ -827,23 +875,25 @@ def test_session_cookie_no_secure_but_hsts(self): self.assertFalse(result['sameSite']) def test_no_secure(self): - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='foo', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -853,23 +903,25 @@ def test_no_secure(self): self.assertFalse(result['sameSite']) def test_session_no_httponly(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -879,23 +931,25 @@ def test_session_no_httponly(self): self.assertFalse(result['sameSite']) def test_session_no_secure(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - 
rfc2109=False, - rest={'HttpOnly': True}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -905,23 +959,25 @@ def test_session_no_secure(self): self.assertFalse(result['sameSite']) # https://github.com/mozilla/http-observatory/issues/97 - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -958,7 +1014,8 @@ def test_header_invalid(self): self.reqs['responses']['https'].headers['Public-Key-Pins'] = ( 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) self.assertEquals('hpkp-header-invalid', result['result']) @@ -970,7 +1027,8 @@ def test_header_invalid(self): self.reqs['responses']['https'].headers['Public-Key-Pins'] = ( 'max-age=15768000; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -995,7 +1053,8 @@ def test_invalid_cert(self): 'includeSubDomains; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) self.reqs['responses']['https'].verified = False result = public_key_pinning(self.reqs) @@ -1008,7 +1067,8 @@ def test_max_age_too_low(self): 'max-age=86400; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -1021,7 +1081,8 @@ def test_implemented(self): 'includeSubDomains; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -1069,11 +1130,13 @@ def tearDown(self): self.reqs = None def test_header_private(self): - for policy in ['no-referrer', - 'same-origin', - 'strict-origin', - 'STRICT-ORIGIN', - 'strict-origin-when-cross-origin']: + for policy in [ + 'no-referrer', + 'same-origin', + 'strict-origin', + 'STRICT-ORIGIN', + 
'strict-origin-when-cross-origin', + ]: self.reqs['responses']['auto'].headers['Referrer-Policy'] = policy result = referrer_policy(self.reqs) @@ -1133,8 +1196,10 @@ def test_header_unsafe(self): self.assertFalse(result['pass']) def test_multiple_value_header_all_valid(self): - valid_but_unsafe_policies = ['origin-when-cross-origin, no-referrer, unsafe-url', # safe in the middle - 'no-referrer, unsafe-url'] # safe at the beginning + valid_but_unsafe_policies = [ + 'origin-when-cross-origin, no-referrer, unsafe-url', # safe in the middle + 'no-referrer, unsafe-url', # safe at the beginning + ] for policy in valid_but_unsafe_policies: self.reqs['responses']['auto'].headers['Referrer-Policy'] = policy @@ -1182,8 +1247,9 @@ def test_header_invalid(self): self.assertFalse(result['pass']) # If the header is set twice - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains, max-age=15768000; includeSubDomains' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains, max-age=15768000; includeSubDomains' result = strict_transport_security(self.reqs) @@ -1201,8 +1267,9 @@ def test_no_https(self): self.assertFalse(result['pass']) def test_invalid_cert(self): - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains; preload' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains; preload' self.reqs['responses']['https'].verified = False result = strict_transport_security(self.reqs) @@ -1219,8 +1286,9 @@ def test_max_age_too_low(self): self.assertFalse(result['pass']) def test_implemented(self): - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains; preload' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains; preload' result = strict_transport_security(self.reqs) @@ -1364,10 +1432,7 @@ def test_missing(self): self.assertFalse(result['pass']) def test_header_invalid(self): - for value in ('whimsy', - '2; mode=block', - '1; mode=block; mode=block', - '1; mode=block, 1; mode=block'): + for value in ('whimsy', '2; mode=block', '1; mode=block; mode=block', '1; mode=block, 1; mode=block'): self.reqs['responses']['auto'].headers['X-XSS-Protection'] = value result = x_xss_protection(self.reqs) diff --git a/httpobs/tests/unittests/test_misc.py b/httpobs/tests/unittests/test_misc.py index 61b1072a..4a6ef6ef 100644 --- a/httpobs/tests/unittests/test_misc.py +++ b/httpobs/tests/unittests/test_misc.py @@ -46,8 +46,9 @@ def test_acao_restricted_with_acao(self): def test_acao_universal_with_acao(self): self.reqs['responses']['cors'].request.headers['Origin'] = 'https://http-observatory.security.mozilla.org' - self.reqs['responses']['cors'].headers['Access-Control-Allow-Origin'] = \ - 'https://http-observatory.security.mozilla.org' + self.reqs['responses']['cors'].headers[ + 'Access-Control-Allow-Origin' + ] = 'https://http-observatory.security.mozilla.org' self.reqs['responses']['cors'].headers['Access-Control-Allow-Credentials'] = 'true' result = cross_origin_resource_sharing(self.reqs) @@ -56,7 +57,9 @@ def test_acao_universal_with_acao(self): self.assertFalse(result['pass']) def test_acao_restricted_with_crossdomain(self): - self.reqs['resources']['/crossdomain.xml'] = """ + self.reqs['resources'][ + '/crossdomain.xml' + ] = """ @@ -69,7 +72,9 @@ def
test_acao_restricted_with_crossdomain(self): self.assertTrue(result['pass']) def test_acao_universal_with_crossdomain(self): - self.reqs['resources']['/crossdomain.xml'] = """ + self.reqs['resources'][ + '/crossdomain.xml' + ] = """ """ @@ -80,7 +85,9 @@ def test_acao_universal_with_crossdomain(self): self.assertFalse(result['pass']) def test_acao_restricted_with_clientaccess(self): - self.reqs['resources']['/clientaccesspolicy.xml'] = """ + self.reqs['resources'][ + '/clientaccesspolicy.xml' + ] = """ @@ -95,12 +102,13 @@ def test_acao_restricted_with_clientaccess(self): result = cross_origin_resource_sharing(self.reqs) self.assertEquals('cross-origin-resource-sharing-implemented-with-restricted-access', result['result']) - self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'], - result['data']['clientaccesspolicy']) + self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'], result['data']['clientaccesspolicy']) self.assertTrue(result['pass']) def test_acao_universal_with_clientaccess(self): - self.reqs['resources']['/clientaccesspolicy.xml'] = """ + self.reqs['resources'][ + '/clientaccesspolicy.xml' + ] = """ @@ -164,8 +172,10 @@ def test_redirects_to_https(self): result = redirection(self.reqs) self.assertEquals('redirection-to-https', result['result']) - self.assertEquals(['http://http-observatory.security.mozilla.org/', - 'https://http-observatory.security.mozilla.org/'], result['route']) + self.assertEquals( + ['http://http-observatory.security.mozilla.org/', 'https://http-observatory.security.mozilla.org/'], + result['route'], + ) self.assertTrue(result['pass']) def test_redirects_to_https_with_port_number(self): @@ -181,8 +191,10 @@ def test_redirects_to_https_with_port_number(self): result = redirection(self.reqs) self.assertEquals('redirection-to-https', result['result']) - self.assertEquals(['http://http-observatory.security.mozilla.org/', - 'https://http-observatory.security.mozilla.org:443/'], result['route']) + self.assertEquals( + ['http://http-observatory.security.mozilla.org/', 'https://http-observatory.security.mozilla.org:443/'], + result['route'], + ) self.assertTrue(result['pass']) def test_redirects_invalid_cert(self): @@ -237,10 +249,12 @@ def test_first_redirection_off_host(self): def test_all_redirections_preloaded(self): self.reqs['responses']['http'].url = 'https://www.pokeinthe.io/foo/bar' - for url in ('http://pokeinthe.io/', - 'https://pokeinthe.io/', - 'https://www.pokeinthe.io/', - 'https://baz.pokeinthe.io/foo'): + for url in ( + 'http://pokeinthe.io/', + 'https://pokeinthe.io/', + 'https://www.pokeinthe.io/', + 'https://baz.pokeinthe.io/foo', + ): history = UserDict() history.request = UserDict() history.request.url = url diff --git a/httpobs/tests/unittests/test_parse_http_equiv_headers.py b/httpobs/tests/unittests/test_parse_http_equiv_headers.py index ba27d2ac..9e0dbc74 100644 --- a/httpobs/tests/unittests/test_parse_http_equiv_headers.py +++ b/httpobs/tests/unittests/test_parse_http_equiv_headers.py @@ -13,8 +13,7 @@ def tearDown(self): def test_header_match(self): reqs = empty_requests('test_parse_http_equiv_headers_csp1.html') - self.assertEquals(reqs['responses']['auto'].http_equiv, - {'Content-Security-Policy': ['default-src \'none\';']}) + self.assertEquals(reqs['responses']['auto'].http_equiv, {'Content-Security-Policy': ['default-src \'none\';']}) def test_header_case_insensitivity(self): reqs = empty_requests('test_parse_http_equiv_headers_csp1.html') @@ -25,9 +24,12 @@ def 
test_header_case_insensitivity(self): def test_multiple_http_equivs(self): reqs = empty_requests('test_parse_http_equiv_headers_csp_multiple_http_equiv1.html') - self.assertEquals(reqs['responses']['auto'].http_equiv['Content-Security-Policy'], [ - "default-src 'none'; object-src 'none'; media-src 'none';", - "connect-src 'self'; font-src 'self'; child-src 'self'", - "img-src 'self'; style-src 'self' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ", - "script-src 'self' 'unsafe-inline' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ'", - ]) + self.assertEquals( + reqs['responses']['auto'].http_equiv['Content-Security-Policy'], + [ + "default-src 'none'; object-src 'none'; media-src 'none';", + "connect-src 'self'; font-src 'self'; child-src 'self'", + "img-src 'self'; style-src 'self' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ", + "script-src 'self' 'unsafe-inline' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ'", + ], + ) diff --git a/httpobs/tests/unittests/test_retriever.py b/httpobs/tests/unittests/test_retriever.py index 1df50d0c..6107c8ec 100644 --- a/httpobs/tests/unittests/test_retriever.py +++ b/httpobs/tests/unittests/test_retriever.py @@ -1,9 +1,9 @@ import random -import requests import string - from unittest import TestCase +import requests + from httpobs.scanner.retriever import get_duplicate_header_values, retrieve_all from httpobs.tests.utils import empty_requests @@ -22,10 +22,10 @@ def test_retrieve_non_existent_domain(self): self.assertEquals(domain, reqs['hostname']) self.assertEquals({}, reqs['resources']) - def test_retrieve_mozilla(self): - reqs = retrieve_all('mozilla.org') + def test_retrieve_mdn(self): + reqs = retrieve_all('developer.mozilla.org') - # Various things we know about mozilla.org + # Various things we know about developer.mozilla.org self.assertIsNotNone(reqs['resources']['__path__']) self.assertIsNotNone(reqs['resources']['/contribute.json']) self.assertIsNotNone(reqs['resources']['/robots.txt']) @@ -38,12 +38,12 @@ def test_retrieve_mozilla(self): self.assertIsInstance(reqs['responses']['https'], requests.Response) self.assertIsInstance(reqs['session'], requests.Session) - self.assertEquals(reqs['hostname'], 'mozilla.org') + self.assertEquals(reqs['hostname'], 'developer.mozilla.org') self.assertEquals('text/html', reqs['responses']['auto'].headers['Content-Type'][0:9]) - self.assertEquals(2, len(reqs['responses']['auto'].history)) + self.assertEquals(1, len(reqs['responses']['auto'].history)) self.assertEquals(200, reqs['responses']['auto'].status_code) - self.assertEquals('https://www.mozilla.org/en-US/', reqs['responses']['auto'].url) + self.assertEquals('https://developer.mozilla.org/en-US/', reqs['responses']['auto'].url) def test_retrieve_invalid_cert(self): reqs = retrieve_all('expired.badssl.com') @@ -58,5 +58,5 @@ def test_multiple_csp_headers_in_http(self): self.assertEquals( get_duplicate_header_values(reqs['responses']['auto'], 'Content-Security-Policy'), - ["script-src 'unsafe-inline'", 'img-src https://google.com'] + ["script-src 'unsafe-inline'", 'img-src https://google.com'], ) diff --git a/httpobs/tests/unittests/test_sanitize_headers.py b/httpobs/tests/unittests/test_sanitize_headers.py index 61d7c104..0e337d97 100644 --- a/httpobs/tests/unittests/test_sanitize_headers.py +++ b/httpobs/tests/unittests/test_sanitize_headers.py @@ -6,17 +6,11 @@ class TestValidHostname(TestCase): def test_valid_size_headers(self): # TODO: Try to find a site with www.site.foo but not site.foo - headers = { - 'Content-Type': 'text/html', - 'Location': '/whatever' - } + headers 
= {'Content-Type': 'text/html', 'Location': '/whatever'} self.assertEquals(headers, sanitize_headers(headers)) def test_huge_headers(self): - headers = { - 'Content-Type': 'text/html', - 'Location': '/whatever' * 10000 - } + headers = {'Content-Type': 'text/html', 'Location': '/whatever' * 10000} self.assertIsNone(sanitize_headers(headers)) diff --git a/httpobs/tests/unittests/test_valid_hostname.py b/httpobs/tests/unittests/test_valid_hostname.py index 82f129e0..ba341df4 100644 --- a/httpobs/tests/unittests/test_valid_hostname.py +++ b/httpobs/tests/unittests/test_valid_hostname.py @@ -1,7 +1,6 @@ from unittest import TestCase -from unittest.mock import patch -from httpobs.scanner.utils import valid_hostname +from httpobs.website.utils import valid_hostname class TestValidHostname(TestCase): @@ -18,7 +17,3 @@ def test_invalid_hostname(self): self.assertFalse(valid_hostname('_spf.google.com')) # no A records self.assertFalse(valid_hostname('127.0.0.1')) self.assertFalse(valid_hostname('2607:f8b0:4009:80b::200e')) - - @patch('httpobs.scanner.utils.SCANNER_ALLOW_LOCALHOST', 'yes') - def test_valid_localhost(self): - self.assertTrue(valid_hostname('localhost')) diff --git a/httpobs/tests/utils.py b/httpobs/tests/utils.py index 7ba614b4..8e2dcb8e 100644 --- a/httpobs/tests/utils.py +++ b/httpobs/tests/utils.py @@ -1,12 +1,12 @@ +import os.path from collections import UserDict from copy import deepcopy -from requests.cookies import RequestsCookieJar from typing import Union + +from requests.cookies import RequestsCookieJar from urllib3 import HTTPResponse from urllib3._collections import HTTPHeaderDict -import os.path - from httpobs.scanner.utils import parse_http_equiv_headers diff --git a/httpobs/website/__init__.py b/httpobs/website/__init__.py index ea91afc4..dc2d3f42 100644 --- a/httpobs/website/__init__.py +++ b/httpobs/website/__init__.py @@ -1,4 +1,3 @@ from httpobs.website.decorators import add_response_headers, sanitized_api_response -__all__ = ['add_response_headers', - 'sanitized_api_response'] +__all__ = ['add_response_headers', 'sanitized_api_response'] diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 87fdcac7..9ef3b8fd 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -1,22 +1,24 @@ -from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN -from httpobs.scanner import STATES -from httpobs.scanner.grader import get_score_description, GRADES -from httpobs.scanner.utils import valid_hostname -from httpobs.website import add_response_headers, sanitized_api_response +import json +import os.path +import sys from flask import Blueprint, jsonify, make_response, request from werkzeug.http import http_date import httpobs.database as database -import json -import os.path - +from httpobs import STATE_FAILED, STATE_RUNNING, STATES +from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN, DEVELOPMENT_MODE +from httpobs.scanner import scan +from httpobs.scanner.grader import GRADES, get_score_description +from httpobs.website import add_response_headers, sanitized_api_response +from httpobs.website.utils import valid_hostname api = Blueprint('api', __name__) # TODO: Implement API to write public and private headers to the database + @api.route('/api/v1/analyze', methods=['GET', 'OPTIONS', 'POST']) @add_response_headers(cors=True) @sanitized_api_response @@ -24,11 +26,13 @@ def api_post_scan_hostname(): # TODO: Allow people to accidentally use https://mozilla.org and convert to mozilla.org # Get the hostname - hostname = 
request.args.get('host', '').lower() + hostname = request.args.get('host', '').lower().strip() # Fail if it's not a valid hostname (not in DNS, not a real hostname, etc.) ip = True if valid_hostname(hostname) is None else False - hostname = valid_hostname(hostname) or valid_hostname('www.' + hostname) # prepend www. if necessary + hostname = valid_hostname(hostname) or ( + valid_hostname('www.' + hostname) if hostname else False + ) # prepend www. if necessary if ip: return { @@ -65,18 +69,48 @@ def api_post_scan_hostname(): # Begin the dispatch process if it was a POST if request.method == 'POST': row = database.insert_scan(site_id, hidden=hidden) + scan_id = row["id"] + + # The scan now runs synchronously (celery is gone), so move the scan state from PENDING to RUNNING ourselves + database.update_scan_state(scan_id, STATE_RUNNING) + + # Get the site's cookies and headers + headers = database.select_site_headers(hostname) + + try: + result = scan(hostname, headers=headers.get("headers", {}), cookies=headers.get("cookies", {})) + + if "error" in result: + row = database.update_scan_state(scan_id, STATE_FAILED, error=result["error"]) + else: + row = database.insert_test_results( + site_id, + scan_id, + result, + ) + except Exception: + # If we are unsuccessful, close out the scan in the database + row = database.update_scan_state(scan_id, STATE_FAILED) + + # Print the exception to stderr if we're in dev + if DEVELOPMENT_MODE: + import traceback + + print("Error detected in scan for: " + hostname) + traceback.print_exc(file=sys.stderr) else: return { 'error': 'recent-scan-not-found', 'text': 'Recently completed scan for {hostname} not found'.format( - hostname=request.args.get('host', '')) + hostname=request.args.get('host', '') + ), } # If there was a rescan attempt and it returned a row, it's because the rescan was done within the cooldown window elif rescan and request.method == 'POST': return { 'error': 'rescan-attempt-too-soon', - 'text': '{hostname} is on temporary cooldown'.format(hostname=request.args.get('host', '')) + 'text': '{hostname} is on temporary cooldown'.format(hostname=request.args.get('host', '')), } # Return the scan row @@ -120,8 +154,9 @@ def api_get_host_history(): return jsonify({'error': 'No history found'}) # Prune for when the score doesn't change; thanks to chuck for the elegant list comprehension - pruned_history = [v for k, v in enumerate(history) if history[k].get('score') is not history[k - 1].get('score') or - k == 0] + pruned_history = [ + v for k, v in enumerate(history) if history[k].get('score') != history[k - 1].get('score') or k == 0 + ] # Return the host history return jsonify(pruned_history) @@ -142,9 +177,9 @@ def api_get_recent_scans(): except ValueError: return {'error': 'invalid-parameters'} - return jsonify(database.select_scan_recent_finished_scans(num_scans=num_scans, - min_score=min_score, - max_score=max_score)) + return jsonify( + database.select_scan_recent_finished_scans(num_scans=num_scans, min_score=min_score, max_score=max_score) + ) # TODO: Deprecate @@ -185,31 +220,38 @@ def api_get_scanner_stats(): stats['most_recent_scan_datetime'] = http_date(stats['most_recent_scan_datetime'].utctimetuple()) stats['recent_scans'] = {http_date(i.utctimetuple()): v for i, v in stats['recent_scans']} - resp = make_response(json.dumps({ - 'gradeDistribution': { - 'latest': grade_distribution, - 'all': grade_distribution_all_scans, - }, - 'gradeImprovements': grade_improvements, - 'misc': { - 'mostRecentScanDate': stats['most_recent_scan_datetime'],
'numHoursWithoutScansInLast24Hours': 24 - len(stats['recent_scans']) if verbose else -1, - 'numImprovedSites': sum([v for k, v in grade_improvements_all.items() if k > 0]), - 'numScans': stats['scan_count'], - 'numScansLast24Hours': sum(stats['recent_scans'].values()) if verbose else -1, - 'numSuccessfulScans': sum(grade_distribution_all_scans.values()), - 'numUniqueSites': sum(grade_improvements_all.values()) - }, - 'recent': { - 'scans': { - 'best': database.select_scan_recent_finished_scans(13, 90, 1000), # 13, as there are 13 grades - 'recent': database.select_scan_recent_finished_scans(13, 0, 1000), # 13, as there are 13 grades - 'worst': database.select_scan_recent_finished_scans(13, 0, 20), # 13, as there are 13 grades - 'numPerHourLast24Hours': stats['recent_scans'], + resp = make_response( + json.dumps( + { + 'gradeDistribution': { + 'latest': grade_distribution, + 'all': grade_distribution_all_scans, + }, + 'gradeImprovements': grade_improvements, + 'misc': { + 'mostRecentScanDate': stats['most_recent_scan_datetime'], + 'numHoursWithoutScansInLast24Hours': 24 - len(stats['recent_scans']) if verbose else -1, + 'numImprovedSites': sum([v for k, v in grade_improvements_all.items() if k > 0]), + 'numScans': stats['scan_count'], + 'numScansLast24Hours': sum(stats['recent_scans'].values()) if verbose else -1, + 'numSuccessfulScans': sum(grade_distribution_all_scans.values()), + 'numUniqueSites': sum(grade_improvements_all.values()), + }, + 'recent': { + 'scans': { + 'best': database.select_scan_recent_finished_scans(13, 90, 1000), # 13, as there are 13 grades + 'recent': database.select_scan_recent_finished_scans(13, 0, 1000), # 13, as there are 13 grades + 'worst': database.select_scan_recent_finished_scans(13, 0, 20), # 13, as there are 13 grades + 'numPerHourLast24Hours': stats['recent_scans'], + }, + }, + 'states': {state: stats['states'].get(state, 0) for state in STATES}, }, - }, - 'states': {state: stats['states'].get(state, 0) for state in STATES}, - }, indent=4 if pretty else None, sort_keys=pretty, default=str)) + indent=4 if pretty else None, + sort_keys=pretty, + default=str, + ) + ) resp.mimetype = 'application/json' diff --git a/httpobs/website/decorators.py b/httpobs/website/decorators.py index 23c347d7..53cccaf1 100644 --- a/httpobs/website/decorators.py +++ b/httpobs/website/decorators.py @@ -1,6 +1,7 @@ -from flask import jsonify, make_response, request from functools import wraps +from flask import jsonify, make_response, request + def add_response_headers(headers=None, default_headers=None, cors=False): """ @@ -14,8 +15,9 @@ def add_response_headers(headers=None, default_headers=None, cors=False): if not default_headers: default_headers = { - 'Content-Security-Policy': ("default-src 'none'; base-uri 'none'; " - "form-action 'none'; frame-ancestors 'none'"), + 'Content-Security-Policy': ( + "default-src 'none'; base-uri 'none'; " "form-action 'none'; frame-ancestors 'none'" + ), 'Referrer-Policy': 'no-referrer', 'Strict-Transport-Security': 'max-age=63072000', 'X-Content-Type-Options': 'nosniff', @@ -35,16 +37,19 @@ def wrapper(*args, **kwargs): # Append the CORS headers if cors: - headers.update({ - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': ', '.join(request.url_rule.methods), - 'Access-Control-Max-Age': '86400', - }) + headers.update( + { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': ', '.join(request.url_rule.methods), + 'Access-Control-Max-Age': '86400', + } + ) # Append the headers to the response for header, 
value in headers.items(): resp.headers[header] = value return resp + return wrapper return decorator @@ -55,11 +60,34 @@ def sanitized_api_response(fn): def wrapper(*args, **kwargs): output = fn(*args, **kwargs) - SCAN_VALID_KEYS = ('algorithm_version', 'end_time', 'error', 'grade', 'hidden', 'likelihood_indicator', - 'response_headers', 'scan_id', 'score', 'start_time', 'state', 'status_code', - 'tests_completed', 'tests_failed', 'tests_passed', 'tests_quantity') - TEST_RESULT_VALID_KEYS = ('error', 'expectation', 'name', 'output', 'pass', 'result', - 'score_description', 'score_modifier') + SCAN_VALID_KEYS = ( + 'algorithm_version', + 'end_time', + 'error', + 'grade', + 'hidden', + 'likelihood_indicator', + 'response_headers', + 'scan_id', + 'score', + 'start_time', + 'state', + 'status_code', + 'tests_completed', + 'tests_failed', + 'tests_passed', + 'tests_quantity', + ) + TEST_RESULT_VALID_KEYS = ( + 'error', + 'expectation', + 'name', + 'output', + 'pass', + 'result', + 'score_description', + 'score_modifier', + ) # Convert it to a dict (in case it's a DictRow) output = dict(output) @@ -70,7 +98,7 @@ def wrapper(*args, **kwargs): # Remove 'error' if it's null if output['error'] is None: - del(output['error']) + del output['error'] # Delete any other things that might have made their way into the results output = {k: output[k] for k in SCAN_VALID_KEYS if k in output} @@ -81,4 +109,5 @@ def wrapper(*args, **kwargs): output[test] = {k: output[test][k] for k in output[test] if k in TEST_RESULT_VALID_KEYS} return jsonify(output) + return wrapper diff --git a/httpobs/website/main.py b/httpobs/website/main.py index d253d6f4..b7e6ad76 100644 --- a/httpobs/website/main.py +++ b/httpobs/website/main.py @@ -2,7 +2,7 @@ from flask import Flask -from httpobs.conf import DEVELOPMENT_MODE, API_PORT, API_PROPAGATE_EXCEPTIONS +from httpobs.conf import API_PORT, API_PROPAGATE_EXCEPTIONS, DEVELOPMENT_MODE from httpobs.website import add_response_headers from httpobs.website.api import api from httpobs.website.monitoring import monitoring_api @@ -26,6 +26,9 @@ def main() -> str: return 'Welcome to the HTTP Observatory!' 
+def run(): + app.run(debug=DEVELOPMENT_MODE, port=API_PORT) + + if __name__ == '__main__': - app.run(debug=DEVELOPMENT_MODE, - port=API_PORT) + run() diff --git a/httpobs/website/monitoring.py b/httpobs/website/monitoring.py index 963d8a12..8fdfdafc 100644 --- a/httpobs/website/monitoring.py +++ b/httpobs/website/monitoring.py @@ -1,9 +1,8 @@ -from flask import abort, Blueprint, jsonify +from flask import Blueprint, abort, jsonify from httpobs import SOURCE_URL, VERSION from httpobs.database import get_cursor - monitoring_api = Blueprint('monitoring-api', __name__) @@ -27,5 +26,4 @@ def lbheartbeat(): @monitoring_api.route('/__version__') def version(): - return jsonify({'source': SOURCE_URL, - 'version': VERSION}) + return jsonify({'source': SOURCE_URL, 'version': VERSION}) diff --git a/httpobs/website/requirements.txt b/httpobs/website/requirements.txt deleted file mode 100644 index 3a26679c..00000000 --- a/httpobs/website/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -Flask==1.0.2 -uWSGI==2.0.17.1 diff --git a/httpobs/website/utils.py b/httpobs/website/utils.py new file mode 100644 index 00000000..65fac948 --- /dev/null +++ b/httpobs/website/utils.py @@ -0,0 +1,41 @@ +import socket + + +def valid_hostname(hostname: str): + """ + :param hostname: The hostname requested in the scan + :return: Hostname if it's valid, None if it's an IP address, otherwise False + """ + + # Block attempts to scan things like 'localhost' + if '.' not in hostname or 'localhost' in hostname: + return False + + # First, let's try to see if it's an IPv4 address + try: + socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address + return None # If we get this far, it's an IP address and therefore not a valid fqdn + except: + pass + + # And IPv6 + try: + socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 + return None + except: + pass + + # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time + # that the validator is making a network connection -- the same one that requests would make. + try: + hostname_ips = socket.getaddrinfo(hostname, 443) + + # This shouldn't trigger, since getaddrinfo should raise socket.gaierror if there are no A records. + # Nevertheless, I want to be careful in case of edge cases. This does make it hard to test. + if len(hostname_ips) < 1: + return False + except: + return False + + # If we've made it this far, then everything is good to go! Woohoo! + return hostname diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..6a7319d2 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,830 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter."
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt 
(>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blinker" +version = "1.7.0" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.8" +files = [ + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, +] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.4.0" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = 
"sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = 
"coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "flask" +version = "3.0.0" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"}, + {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "identify" +version = "2.5.33" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = 
"MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pep8" +version = "1.7.1" +description = "Python style guide checker" +optional = false +python-versions = "*" +files = [ + {file = "pep8-1.7.1-py2.py3-none-any.whl", hash = "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee"}, + {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, +] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pre-commit" +version = "3.6.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "publicsuffixlist" +version = "0.10.0.20231214" +description = "publicsuffixlist implement" +optional = false +python-versions = ">=2.6" +files = [ + {file = "publicsuffixlist-0.10.0.20231214-py2.py3-none-any.whl", hash = "sha256:10e227902e3b2acefb604b5de8a8a7d3df237f2885f06762d47fdbc9e0528b67"}, + {file = "publicsuffixlist-0.10.0.20231214.tar.gz", hash = "sha256:76a2ed46814f091ea867fb40a6c20c142a437af7aae7ac8eb425ddc464bcb8e1"}, +] + +[package.extras] +readme = ["pandoc"] +update = ["requests"] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pynose" +version = "1.4.8" +description = "pynose fixes nose to extend unittest and make testing 
easier" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pynose-1.4.8-py3-none-any.whl", hash = "sha256:caecaa293c3be2047786331c10e29649b47fade9244293207e20db00cf156843"}, + {file = "pynose-1.4.8.tar.gz", hash = "sha256:c8c1d500f5b64693432520438124c0fd016bfe30826cc3d34848e83e11dd0d02"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = 
"sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = 
"PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "urllib3" +version = "2.1.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
+    {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uwsgi"
+version = "2.0.23"
+description = "The uWSGI server"
+optional = false
+python-versions = "*"
+files = [
+    {file = "uwsgi-2.0.23.tar.gz", hash = "sha256:0cafda0c16f921db7fe42cfaf81b167cf884ee17350efbdd87d1ecece2d7de37"},
+]
+
+[[package]]
+name = "virtualenv"
+version = "20.25.0"
+description = "Virtual Python Environment builder"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"},
+    {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"},
+]
+
+[package.dependencies]
+distlib = ">=0.3.7,<1"
+filelock = ">=3.12.2,<4"
+platformdirs = ">=3.9.1,<5"
+
+[package.extras]
+docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
+test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
+
+[[package]]
+name = "werkzeug"
+version = "3.0.1"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"},
+    {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "08b448fa537b520f27a5a642b51b5562e1eb126604da8f7d4bccd4459afa4fe7"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..d7ae612e
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,46 @@
+[tool.poetry]
+name = "httpobs"
+version = "0.9.3"
+description = "HTTP Observatory: a set of tests and tools to scan your website for basic web hygiene."
+license = "MPL-2.0"
+authors = ["April King <april@mozilla.com>"]
+maintainers = ["Leo McArdle "]
+
+[tool.poetry.scripts]
+httpobs-local-scan = 'httpobs.scripts.scan:main'
+httpobs-server = "httpobs.website.main:run"
+httpobs-regen-hsts-preload = "httpobs.scanner.utils:retrieve_store_hsts_preload_list"
+
+[tool.poetry.dependencies]
+python = "^3.11"
+beautifulsoup4 = "^4.12.2"
+flake8 = "^6.1.0"
+pep8 = "^1.7.1"
+psycopg2 = "^2.9.9"
+publicsuffixlist = "^0.10.0.20231002"
+requests = "^2.31.0"
+Flask = "^3.0.0"
+uWSGI = "^2.0.22"
+pre-commit = "^3.6.0"
+black = "^23.12.1"
+isort = "^5.13.2"
+
+[tool.poetry.group.test.dependencies]
+coverage = "^7.4.0"
+pynose = "^1.4.8"
+urllib3 = "^2.1.0"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.black]
+target-version = ["py311"]
+required-version = "23"
+line-length = 120
+skip-string-normalization = true
+
+[tool.isort]
+profile = "black"
+skip_gitignore = true
+line_length = 120
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index 591a6a8b..00000000
--- a/requirements.txt
+++ /dev/null
@@ -1,4 +0,0 @@
--r httpobs/requirements.txt
--r httpobs/database/requirements.txt
--r httpobs/scanner/requirements.txt
--r httpobs/website/requirements.txt
diff --git a/setup.py b/setup.py
deleted file mode 100644
index a605959b..00000000
--- a/setup.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-
-from httpobs import SOURCE_URL, VERSION
-from setuptools import setup, find_packages
-
-
-__dirname = os.path.abspath(os.path.dirname(__file__))
-
-with open(os.path.join(__dirname, 'README.md')) as readme:
-    README = readme.read()
-
-setup(
-    name='httpobs',
-    version=VERSION,
-    description='HTTP Observatory: a set of tests and tools to scan your website for basic web hygeine.',
-    url=SOURCE_URL,
-    long_description_content_type='text/markdown',
-    long_description=README,
-    classifiers=[
-        'Development Status :: 4 - Beta',
-        'Environment :: Web Environment',
-        'Framework :: Flask',
-        'Intended Audience :: Developers',
-        'Intended Audience :: System Administrators',
-        'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
-        'Natural Language :: English',
-        'Programming Language :: Python :: 3 :: Only',
-        'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
-        'Topic :: Security',
-        'Topic :: Software Development :: Quality Assurance',
-    ],
-    author='April King',
-    author_email='april@mozilla.com',
-    packages=find_packages(),
-    include_package_data=True,
-    install_requires=[
-        'amqp==2.3.2',
-        'beautifulsoup4==4.6.3',
-        'billiard==3.5.0.4',
-        'celery==4.2.1',
-        'click==7.0',
-        'coverage==4.5.2',
-        'flake8==3.6.0',
-        'httpobs-cli==1.0.2',
-        'itsdangerous==1.1.0',
-        'kombu==4.2.1',
-        'MarkupSafe==1.1.0',
-        'mccabe==0.6.1',
-        'nose==1.3.7',
-        'pep8==1.7.1',
-        'pycodestyle==2.4.0',
-        'pyflakes==2.0.0',
-        'pytz==2018.7',
-        'vine==1.1.4',
-        'Werkzeug==0.14.1',
-        'psycopg2>=2.7,<2.8',
-        'redis==2.10.6',
-        'psutil==5.9.0',
-        'publicsuffixlist==0.7.12',
-        'requests==2.27.1',
-        'Flask==1.0.2',
-        'uWSGI==2.0.17.1'
-    ],
-    scripts=['httpobs/scripts/httpobs-local-scan',
-             'httpobs/scripts/httpobs-mass-scan',
-             'httpobs/scripts/httpobs-scan-worker'],
-    zip_safe=False,
-)
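A note on the migration above: the `scripts=[...]` entries removed with setup.py are replaced by the `[tool.poetry.scripts]` table in pyproject.toml, where each `name = "module:function"` pair has Poetry generate a console executable that imports the module and calls the function. A minimal sketch of what such a generated shim does (the entry point string is the real one declared above; the shim body is illustrative of standard console-script behavior, not code from this repository):

```python
# Illustrative sketch: roughly what the generated "httpobs-local-scan"
# executable does for the entry point 'httpobs.scripts.scan:main'.
import sys

from httpobs.scripts.scan import main  # module and function named by the entry point

if __name__ == '__main__':
    sys.exit(main())  # propagate main()'s return value as the process exit status
```

After `poetry install`, these commands are available inside the project virtualenv, e.g. via `poetry run httpobs-local-scan`.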
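One detail in `[tool.black]` worth flagging: `skip-string-normalization = true` stops Black from rewriting quote characters, so the single-quoted strings prevalent in the existing codebase survive reformatting. A hypothetical before/after illustration (not code from this repository):

```python
# With skip-string-normalization = true, Black keeps the author's quote style:
name = 'httpobs'  # left as-is
# With Black's default normalization, the same line would become:
# name = "httpobs"
```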