From 481706d58bb7ece2c23fb701dc0a4a69fd0e6d69 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 6 Oct 2023 14:28:06 +0000 Subject: [PATCH 01/31] migrate to poetry and upgrade deps --- .flake8 | 9 +- .github/workflows/test.yml | 8 +- httpobs/Dockerfile => Dockerfile | 14 +- docker-compose.yml | 4 +- httpobs/database/celeryconfig.py | 20 +- httpobs/database/database.py | 4 +- httpobs/database/requirements.txt | 2 - httpobs/requirements.txt | 19 - httpobs/scanner/analyzer/headers.py | 2 +- httpobs/scanner/celeryconfig.py | 18 +- httpobs/scanner/local.py | 4 +- httpobs/scanner/requirements.txt | 3 - httpobs/scripts/httpobs-scan-worker | 8 +- httpobs/website/decorators.py | 2 +- httpobs/website/requirements.txt | 2 - poetry.lock | 881 ++++++++++++++++++++++++++++ pyproject.toml | 28 + requirements.txt | 4 - 18 files changed, 959 insertions(+), 73 deletions(-) rename httpobs/Dockerfile => Dockerfile (55%) delete mode 100644 httpobs/database/requirements.txt delete mode 100644 httpobs/requirements.txt delete mode 100644 httpobs/scanner/requirements.txt delete mode 100644 httpobs/website/requirements.txt create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 requirements.txt diff --git a/.flake8 b/.flake8 index f6ed7bb9..e30b7685 100644 --- a/.flake8 +++ b/.flake8 @@ -2,6 +2,9 @@ exclude = .flake8 .git -ignore = E722, # ignore bare excepts until I can get around to fixing them - W504 # line break after binary operator -max-line-length = 119 +ignore = + # ignore bare excepts until I can get around to fixing them + E722, + # line break after binary operator + W504, +max-line-length = 120 diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d9873504..81dd9022 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,9 +2,11 @@ on: push: branches: - master + - next pull_request: branches: - master + - next name: Run tests @@ -14,7 +16,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [ '3.6', '3.7' ] + python-version: [ '3.11' ] env: HTTPOBS_BROKER_URL: fakebrokerurl HTTPOBS_DATABASE_HOST: fakehost @@ -28,7 +30,9 @@ jobs: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install . -r requirements.txt + pip install poetry + poetry config virtualenvs.create false + poetry install - name: Run flake8 tests run: flake8 --config .flake8 httpobs - name: Run nose tests diff --git a/httpobs/Dockerfile b/Dockerfile similarity index 55% rename from httpobs/Dockerfile rename to Dockerfile index 1b9e761c..49590014 100644 --- a/httpobs/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ # http-observatory -FROM python:3.5 +FROM python:3.11 MAINTAINER https://github.com/mozilla/http-observatory RUN groupadd --gid 1001 app && \ @@ -9,13 +9,13 @@ RUN install -o app -g app -d /var/run/httpobs /var/log/httpobs WORKDIR /app -COPY . httpobs +COPY pyproject.toml poetry.lock . +RUN pip install poetry && \ + poetry config virtualenvs.create false && \ + poetry install -RUN pip install --upgrade --no-cache-dir \ - -r httpobs/requirements.txt \ - -r httpobs/database/requirements.txt \ - -r httpobs/scanner/requirements.txt \ - -r httpobs/website/requirements.txt +COPY httpobs httpobs +RUN poetry install --no-dev ENV PYTHONPATH $PYTHONPATH:/app diff --git a/docker-compose.yml b/docker-compose.yml index be3c214c..31740c0d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,7 +1,7 @@ version: '2' services: website: - build: ./httpobs + build: . 
        command: uwsgi --http :57001 --wsgi-file /app/httpobs/website/main.py --processes 8 --callable app --master
         depends_on:
             - postgres
@@ -18,7 +18,7 @@ services:
 
     # celery task for scanner
     scanner:
-        build: ./httpobs
+        build: .
         command: /app/httpobs/scripts/httpobs-scan-worker
         depends_on:
             - postgres
diff --git a/httpobs/database/celeryconfig.py b/httpobs/database/celeryconfig.py
index d58b2a2a..a2bc875d 100644
--- a/httpobs/database/celeryconfig.py
+++ b/httpobs/database/celeryconfig.py
@@ -1,19 +1,19 @@
-from httpobs.conf import BROKER_URL
+from httpobs.conf import BROKER_URL as broker_url
 
 # Set the Celery task queue
-BROKER_URL = BROKER_URL
+broker_url = broker_url
 
-CELERY_ACCEPT_CONTENT = ['json']
-CELERY_IGNORE_RESULTS = True
-CELERY_REDIRECT_STDOUTS_LEVEL = 'WARNING'
-CELERY_RESULT_SERIALIZER = 'json'
-CELERY_TASK_SERIALIZER = 'json'
+accept_content = ['json']
+task_ignore_result = True
+worker_redirect_stdouts_level = 'WARNING'
+result_serializer = 'json'
+task_serializer = 'json'
 
-CELERYD_TASK_SOFT_TIME_LIMIT = 300
-CELERYD_TASK_TIME_LIMIT = 600
+task_soft_time_limit = 300
+task_time_limit = 600
 
-CELERYBEAT_SCHEDULE = {
+beat_schedule = {
     'abort-broken-scans': {
         'task': 'httpobs.database.tasks.abort_broken_scans',
         'schedule': 1800,
diff --git a/httpobs/database/database.py b/httpobs/database/database.py
index 53be2c73..65070f7c 100644
--- a/httpobs/database/database.py
+++ b/httpobs/database/database.py
@@ -227,14 +227,14 @@ def select_scan_host_history(site_id: int) -> list:
                         (site_id, STATE_FINISHED))
 
             if cur.rowcount > 0:
-                return([
+                return [
                     {
                         'scan_id': row['id'],
                         'grade': row['grade'],
                         'score': row['score'],
                         'end_time': row['end_time'],
                         'end_time_unix_timestamp': int(row['end_time'].timestamp())
-                    } for row in cur.fetchall()])
+                    } for row in cur.fetchall()]
             else:
                 return []
 
diff --git a/httpobs/database/requirements.txt b/httpobs/database/requirements.txt
deleted file mode 100644
index 5641d61b..00000000
--- a/httpobs/database/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-psycopg2>=2.7,<2.8 --no-binary psycopg2 # By constraining psycopg2 to less than 2.8 we can't use Python 3.8 due to https://github.com/psycopg/psycopg2/issues/854#issuecomment-611791946
-redis==2.10.6
diff --git a/httpobs/requirements.txt b/httpobs/requirements.txt
deleted file mode 100644
index 7f2f3340..00000000
--- a/httpobs/requirements.txt
+++ /dev/null
@@ -1,19 +0,0 @@
-amqp==2.3.2
-beautifulsoup4==4.6.3
-billiard==3.5.0.4
-celery==4.2.1
-click==7.0
-coverage==4.5.2
-flake8==3.6.0
-httpobs-cli==1.0.2
-itsdangerous==1.1.0
-kombu==4.2.1
-MarkupSafe==1.1.0
-mccabe==0.6.1
-nose==1.3.7
-pep8==1.7.1
-pycodestyle==2.4.0
-pyflakes==2.0.0
-pytz==2018.7
-vine==1.1.4
-Werkzeug==0.14.1
diff --git a/httpobs/scanner/analyzer/headers.py b/httpobs/scanner/analyzer/headers.py
index 1bc69b9a..a6b8cdee 100644
--- a/httpobs/scanner/analyzer/headers.py
+++ b/httpobs/scanner/analyzer/headers.py
@@ -398,7 +398,7 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') ->
 
     # There are certain cookies we ignore, because they are set by service providers and sites have
     # no control over them.
    for cookie in COOKIES_TO_DELETE:
-        del(session.cookies[cookie])
+        del session.cookies[cookie]
 
     for cookie in session.cookies:
         # The HttpOnly and SameSite functionality is a bit broken
diff --git a/httpobs/scanner/celeryconfig.py b/httpobs/scanner/celeryconfig.py
index fde3868a..e28dea7d 100644
--- a/httpobs/scanner/celeryconfig.py
+++ b/httpobs/scanner/celeryconfig.py
@@ -1,14 +1,14 @@
-from httpobs.conf import BROKER_URL
+from httpobs.conf import BROKER_URL as broker_url
 
 # Set the Celery task queue
-BROKER_URL = BROKER_URL
+broker_url = broker_url
 
-CELERY_ACCEPT_CONTENT = ['json']
-CELERY_IGNORE_RESULTS = True
-CELERY_REDIRECT_STDOUTS_LEVEL = 'WARNING'
-CELERY_RESULT_SERIALIZER = 'json'
-CELERY_TASK_SERIALIZER = 'json'
+accept_content = ['json']
+task_ignore_result = True
+worker_redirect_stdouts_level = 'WARNING'
+result_serializer = 'json'
+task_serializer = 'json'
 
-CELERYD_TASK_SOFT_TIME_LIMIT = 751
-CELERYD_TASK_TIME_LIMIT = 1129
+task_soft_time_limit = 751
+task_time_limit = 1129
diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py
index d10ea07c..3cb86e6b 100644
--- a/httpobs/scanner/local.py
+++ b/httpobs/scanner/local.py
@@ -63,7 +63,7 @@ def scan(hostname, **kwargs):
     tests_passed = sum([1 if result.get('pass') else 0 for result in results])
 
     # Return the results
-    return({
+    return {
         'scan': {
             'grade': grades[1],
             'likelihood_indicator': grades[2],
@@ -74,4 +74,4 @@
             'tests_quantity': NUM_TESTS,
         },
         'tests': {result.pop('name'): result for result in results}
-    })
+    }
diff --git a/httpobs/scanner/requirements.txt b/httpobs/scanner/requirements.txt
deleted file mode 100644
index a27faf57..00000000
--- a/httpobs/scanner/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-psutil==5.9.0
-publicsuffixlist==0.7.12
-requests==2.27.1
diff --git a/httpobs/scripts/httpobs-scan-worker b/httpobs/scripts/httpobs-scan-worker
index f5d39040..e9391149 100755
--- a/httpobs/scripts/httpobs-scan-worker
+++ b/httpobs/scripts/httpobs-scan-worker
@@ -26,15 +26,15 @@ fi
 
 # Execute celery
 celery \
     -A httpobs.scanner.tasks \
-    --autoscale=$CONCURRENCY,4 \
     --broker=$HTTPOBS_BROKER_URL \
-    --detach \
+    worker \
+    --autoscale=$CONCURRENCY,4 \
     --hostname='scanner@%h' \
     --logfile='/var/log/httpobs/scanner.log' \
     --loglevel=$LOGLEVEL \
-    --maxtasksperchild=16 \
+    --max-tasks-per-child=16 \
     --pidfile='/var/run/httpobs/scanner.pid' \
-worker
+    &
 
 # Run the scanner
 python3 -u httpobs/scanner/main.py >> /var/log/httpobs/scan-worker.log 2>&1
diff --git a/httpobs/website/decorators.py b/httpobs/website/decorators.py
index 23c347d7..cb879ee0 100644
--- a/httpobs/website/decorators.py
+++ b/httpobs/website/decorators.py
@@ -70,7 +70,7 @@ def wrapper(*args, **kwargs):
 
         # Remove 'error' if it's null
         if output['error'] is None:
-            del(output['error'])
+            del output['error']
 
         # Delete any other things that might have made their way into the results
         output = {k: output[k] for k in SCAN_VALID_KEYS if k in output}
diff --git a/httpobs/website/requirements.txt b/httpobs/website/requirements.txt
deleted file mode 100644
index 3a26679c..00000000
--- a/httpobs/website/requirements.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Flask==1.0.2
-uWSGI==2.0.17.1
diff --git a/poetry.lock b/poetry.lock
new file mode 100644
index 00000000..62c2e2fc
--- /dev/null
+++ b/poetry.lock
@@ -0,0 +1,881 @@
+# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
+
+[[package]]
+name = "amqp"
+version = "5.1.1"
+description = "Low-level AMQP client for Python (fork of amqplib)."
+optional = false +python-versions = ">=3.6" +files = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] + +[package.dependencies] +vine = ">=5.0.0" + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.6.3" +description = "Screen-scraping library" +optional = false +python-versions = "*" +files = [ + {file = "beautifulsoup4-4.6.3-py2-none-any.whl", hash = "sha256:f0abd31228055d698bb392a826528ea08ebb9959e6bea17c606fd9c9009db938"}, + {file = "beautifulsoup4-4.6.3-py3-none-any.whl", hash = "sha256:194ec62a25438adcb3fdb06378b26559eda1ea8a747367d34c33cef9c7f48d57"}, + {file = "beautifulsoup4-4.6.3.tar.gz", hash = "sha256:90f8e61121d6ae58362ce3bed8cd997efb00c914eae0ff3d363c32f9a9822d10"}, +] + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "billiard" +version = "4.1.0" +description = "Python multiprocessing fork with improvements and bugfixes" +optional = false +python-versions = ">=3.7" +files = [ + {file = "billiard-4.1.0-py3-none-any.whl", hash = "sha256:0f50d6be051c6b2b75bfbc8bfd85af195c5739c281d3f5b86a5640c65563614a"}, + {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, +] + +[[package]] +name = "blinker" +version = "1.6.2" +description = "Fast, simple object-to-object and broadcast signaling" +optional = false +python-versions = ">=3.7" +files = [ + {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, + {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, +] + +[[package]] +name = "celery" +version = "5.3.4" +description = "Distributed Task Queue." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "celery-5.3.4-py3-none-any.whl", hash = "sha256:1e6ed40af72695464ce98ca2c201ad0ef8fd192246f6c9eac8bba343b980ad34"}, + {file = "celery-5.3.4.tar.gz", hash = "sha256:9023df6a8962da79eb30c0c84d5f4863d9793a466354cc931d7f72423996de28"}, +] + +[package.dependencies] +billiard = ">=4.1.0,<5.0" +click = ">=8.1.2,<9.0" +click-didyoumean = ">=0.3.0" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.3.2,<6.0" +python-dateutil = ">=2.8.2" +tzdata = ">=2022.7" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=2.0.2)"] +auth = ["cryptography (==41.0.3)"] +azureblockblob = ["azure-storage-blob (>=12.15.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (>=3.25.0,<4)"] +consul = ["python-consul2 (==0.1.5)"] +cosmosdbsql = ["pydocumentdb (==2.3.5)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb (==1.14.2)"] +django = ["Django (>=2.2.28)"] +dynamodb = ["boto3 (>=1.26.143)"] +elasticsearch = ["elasticsearch (<8.0)"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +memcache = ["pylibmc (==1.6.3)"] +mongodb = ["pymongo[srv] (>=4.0.2)"] +msgpack = ["msgpack (==1.0.5)"] +pymemcache = ["python-memcached (==1.59)"] +pyro = ["pyro4 (==4.82)"] +pytest = ["pytest-celery (==0.0.0)"] +redis = ["redis (>=4.5.2,!=4.5.5,<5.0.0)"] +s3 = ["boto3 (>=1.26.143)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = ["ephem (==4.1.4)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard (==0.21.0)"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, + {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, + {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, + {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, + {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, + {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, + {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, + {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, + {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, + {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +optional = false +python-versions = ">=3.6.2,<4.0.0" +files = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +optional = false +python-versions = "*" +files = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.3.0" +description = "REPL plugin for Click" +optional = false +python-versions = ">=3.6" +files = [ + {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, + {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, +] + +[package.dependencies] +click = ">=7.0" +prompt-toolkit = ">=3.0.36" + +[package.extras] +testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.3.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, + {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, + {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, + {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, + {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, + {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, + {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, + {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, + {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, + {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = 
"sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, + {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, + {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, + {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, + {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, + {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, + {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, + {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, + {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, + {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, + {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, + {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, + {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, + {file = 
"coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, + {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, + {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, + {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, + {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, + {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, + {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "flask" +version = "3.0.0" +description = "A simple framework for building complex web applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"}, + {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"}, +] + +[package.dependencies] +blinker = ">=1.6.2" +click = ">=8.1.3" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.1.2" +Jinja2 = ">=3.1.2" +Werkzeug = ">=3.0.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "httpobs-cli" +version = "1.0.2" +description = "HTTP Observatory: a command line tool to scan your website" +optional = false +python-versions = "*" +files = [ + {file = "httpobs-cli-1.0.2.tar.gz", hash = "sha256:6499b99b4298e9a217361bff7c8679954fd966891e02eb4bdbe0d4f95b02e67f"}, +] + +[package.dependencies] +requests = "*" + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.8.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +optional = false +python-versions = ">=3.7" +files = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kombu" +version = "5.3.2" +description = "Messaging library for Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "kombu-5.3.2-py3-none-any.whl", hash = "sha256:b753c9cfc9b1e976e637a7cbc1a65d446a22e45546cd996ea28f932082b7dc9e"}, + {file = "kombu-5.3.2.tar.gz", hash = "sha256:0ba213f630a2cb2772728aef56ac6883dc3a2f13435e10048f6e97d48506dbbd"}, +] + +[package.dependencies] +amqp = ">=5.1.1,<6.0.0" +typing-extensions = {version = "*", markers = "python_version < \"3.10\""} +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.10.0)"] +azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] +confluentkafka = ["confluent-kafka (==2.1.1)"] +consul = ["python-consul2"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=4.1.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=4.5.2)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] +sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=2.8.0)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = 
"MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = 
"sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "pep8" +version = "1.7.1" +description = "Python style guide checker" +optional = false +python-versions = "*" +files = [ + {file = "pep8-1.7.1-py2.py3-none-any.whl", hash = "sha256:b22cfae5db09833bb9bd7c8463b53e1a9c9b39f12e304a8d0bba729c501827ee"}, + {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.39" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, + {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.5" +description = "Cross-platform lib for process and system monitoring in Python." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, + {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, + {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, + {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, + {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, + {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, + {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, + {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, + {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, + {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, + {file = 
"psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "psycopg2" +version = "2.9.9" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, +] + +[[package]] +name = "publicsuffixlist" +version = "0.10.0.20231002" +description = "publicsuffixlist implement" +optional = false +python-versions = ">=2.6" +files = [ + {file = "publicsuffixlist-0.10.0.20231002-py2.py3-none-any.whl", hash = "sha256:81990427ec5dbdc8f2620c1775d5bc47ba54fe44b4e64797d06040d708d67171"}, + {file = "publicsuffixlist-0.10.0.20231002.tar.gz", hash = "sha256:a8ef3f5745196fd956bcf6f425b5000450896c616ee6e95130e147e2fae10ccc"}, +] + +[package.extras] +readme = ["pandoc"] +update = ["requests"] + +[[package]] +name = "pycodestyle" +version = "2.11.0" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, + {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, +] + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pynose" +version = "1.4.8" +description = "pynose fixes nose to extend unittest and make testing easier" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pynose-1.4.8-py3-none-any.whl", hash = "sha256:caecaa293c3be2047786331c10e29649b47fade9244293207e20db00cf156843"}, + {file = "pynose-1.4.8.tar.gz", hash = 
"sha256:c8c1d500f5b64693432520438124c0fd016bfe30826cc3d34848e83e11dd0d02"}, +] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "redis" +version = "5.0.1" +description = "Python client for Redis database and key-value store" +optional = false +python-versions = ">=3.7" +files = [ + {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, + {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, +] + +[package.dependencies] +async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} + +[package.extras] +hiredis = ["hiredis (>=1.0.0)"] +ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "typing-extensions" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "urllib3" +version = "2.0.6" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"},
+    {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[[package]]
+name = "uwsgi"
+version = "2.0.22"
+description = "The uWSGI server"
+optional = false
+python-versions = "*"
+files = [
+    {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"},
+]
+
+[[package]]
+name = "vine"
+version = "5.0.0"
+description = "Promises, promises, promises."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"},
+    {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"},
+]
+
+[[package]]
+name = "wcwidth"
+version = "0.2.8"
+description = "Measures the displayed width of unicode strings in a terminal"
+optional = false
+python-versions = "*"
+files = [
+    {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"},
+    {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"},
+]
+
+[[package]]
+name = "werkzeug"
+version = "3.0.0"
+description = "The comprehensive WSGI web application library."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"},
+    {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=2.1.1"
+
+[package.extras]
+watchdog = ["watchdog (>=2.3)"]
+
+[[package]]
+name = "zipp"
+version = "3.17.0"
+description = "Backport of pathlib-compatible object wrapper for zip files"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
+    {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
+]
+
+[package.extras]
+docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
+testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.9"
+content-hash = "fb3bbc44a8646402319ec9f5cea011a30a9aa424999db398142620e6835d82f5"
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..5bbfc152
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,28 @@
+[tool.poetry]
+name = "httpobs"
+version = "0.9.3"
+description = "HTTP Observatory: a set of tests and tools to scan your website for basic web hygiene."
+license = "MPL-2.0" +authors = ["April King "] +maintainers = ["Leo McArdle "] + +[tool.poetry.dependencies] +python = "^3.11" +beautifulsoup4 = "4.6.3" +celery = "^5.3.4" +coverage = "^7.3.2" +flake8 = "^6.1.0" +httpobs-cli = "^1.0.2" +pynose = "^1.4.8" +pep8 = "^1.7.1" +psycopg2 = "^2.9.9" +redis = "^5.0.1" +psutil = "^5.9.5" +publicsuffixlist = "^0.10.0.20231002" +requests = "^2.31.0" +Flask = "^3.0.0" +uWSGI = "^2.0.22" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 591a6a8b..00000000 --- a/requirements.txt +++ /dev/null @@ -1,4 +0,0 @@ --r httpobs/requirements.txt --r httpobs/database/requirements.txt --r httpobs/scanner/requirements.txt --r httpobs/website/requirements.txt From 5a65663164879833e16f331c8bf4a892d114fe01 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 6 Oct 2023 14:35:54 +0000 Subject: [PATCH 02/31] missed upgrading beautifulsoup4 --- poetry.lock | 62 ++++++++++++-------------------------------------- pyproject.toml | 2 +- 2 files changed, 15 insertions(+), 49 deletions(-) diff --git a/poetry.lock b/poetry.lock index 62c2e2fc..2d2aefc1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -27,16 +27,18 @@ files = [ [[package]] name = "beautifulsoup4" -version = "4.6.3" +version = "4.12.2" description = "Screen-scraping library" optional = false -python-versions = "*" +python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.6.3-py2-none-any.whl", hash = "sha256:f0abd31228055d698bb392a826528ea08ebb9959e6bea17c606fd9c9009db938"}, - {file = "beautifulsoup4-4.6.3-py3-none-any.whl", hash = "sha256:194ec62a25438adcb3fdb06378b26559eda1ea8a747367d34c33cef9c7f48d57"}, - {file = "beautifulsoup4-4.6.3.tar.gz", hash = "sha256:90f8e61121d6ae58362ce3bed8cd997efb00c914eae0ff3d363c32f9a9822d10"}, + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, ] +[package.dependencies] +soupsieve = ">1.2" + [package.extras] html5lib = ["html5lib"] lxml = ["lxml"] @@ -396,7 +398,6 @@ files = [ [package.dependencies] blinker = ">=1.6.2" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" Werkzeug = ">=3.0.0" @@ -429,25 +430,6 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] -[[package]] -name = "importlib-metadata" -version = "6.8.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, - {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - [[package]] name = "itsdangerous" version 
= "2.1.2" @@ -489,7 +471,6 @@ files = [ [package.dependencies] amqp = ">=5.1.1,<6.0.0" -typing-extensions = {version = "*", markers = "python_version < \"3.10\""} vine = "*" [package.extras] @@ -773,14 +754,14 @@ files = [ ] [[package]] -name = "typing-extensions" -version = "4.8.0" -description = "Backported and Experimental Type Hints for Python 3.8+" +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] [[package]] @@ -860,22 +841,7 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "zipp" -version = "3.17.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - [metadata] lock-version = "2.0" -python-versions = "^3.9" -content-hash = "fb3bbc44a8646402319ec9f5cea011a30a9aa424999db398142620e6835d82f5" +python-versions = "^3.11" +content-hash = "99ae424917bed4065ef3bcbd1e58b0768885e5d888450b134ec1423b28b26ef8" diff --git a/pyproject.toml b/pyproject.toml index 5bbfc152..0009ef8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,7 +8,7 @@ maintainers = ["Leo McArdle "] [tool.poetry.dependencies] python = "^3.11" -beautifulsoup4 = "4.6.3" +beautifulsoup4 = "^4.12.2" celery = "^5.3.4" coverage = "^7.3.2" flake8 = "^6.1.0" From 627d6586089fc4473a48d907e680c87705b30322 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 13 Oct 2023 15:51:53 +0000 Subject: [PATCH 03/31] remove no-longer used celery config from website re-order docker-compose file to group related services --- docker-compose.yml | 16 +++++++--------- httpobs/database/__init__.py | 1 - httpobs/database/celeryconfig.py | 21 --------------------- 3 files changed, 7 insertions(+), 31 deletions(-) delete mode 100644 httpobs/database/celeryconfig.py diff --git a/docker-compose.yml b/docker-compose.yml index 31740c0d..d2b6fef1 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,7 +6,6 @@ services: depends_on: - postgres environment: - - HTTPOBS_BROKER_URL=redis://redis:6379/0 - HTTPOBS_DATABASE_HOST=postgres - HTTPOBS_DATABASE_PASS=httpobsapipassword - HTTPOBS_DATABASE_USER=httpobsapi @@ -16,7 +15,13 @@ services: - "57001:57001" restart: always - # celery task for scanner + postgres: + build: ./httpobs/database + environment: + - 
POSTGRES_USER=httpobs + - POSTGRES_PASSWORD=totallyfakepassword + - POSTGRES_DB=http_observatory + scanner: build: . command: /app/httpobs/scripts/httpobs-scan-worker @@ -33,12 +38,5 @@ services: - postgres - redis - postgres: - build: ./httpobs/database - environment: - - POSTGRES_USER=httpobs - - POSTGRES_PASSWORD=totallyfakepassword - - POSTGRES_DB=http_observatory - redis: image: redis diff --git a/httpobs/database/__init__.py b/httpobs/database/__init__.py index 76f8ea46..5318897d 100644 --- a/httpobs/database/__init__.py +++ b/httpobs/database/__init__.py @@ -16,7 +16,6 @@ update_scans_dequeue_scans) __all__ = [ - 'abort_broken_scans', 'get_cursor', 'insert_scan', 'insert_scan_grade', diff --git a/httpobs/database/celeryconfig.py b/httpobs/database/celeryconfig.py deleted file mode 100644 index a2bc875d..00000000 --- a/httpobs/database/celeryconfig.py +++ /dev/null @@ -1,21 +0,0 @@ -from httpobs.conf import BROKER_URL as broker_url - - -# Set the Celery task queue -broker_url = broker_url - -accept_content = ['json'] -task_ignore_resultS = True -worker_redirect_stdouts_level = 'WARNING' -result_serializer = 'json' -task_serializer = 'json' - -task_soft_time_limit = 300 -task_time_limit = 600 - -beat_schedule = { - 'abort-broken-scans': { - 'task': 'httpobs.database.tasks.abort_broken_scans', - 'schedule': 1800, - } -} From 0cf8f01bd02d8dee6fa4a92b5528a93afb92dd6a Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 17:25:33 +0000 Subject: [PATCH 04/31] add pre-commit --- .pre-commit-config.yaml | 14 +++ poetry.lock | 198 +++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 3 files changed, 211 insertions(+), 2 deletions(-) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..d96d8f47 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,14 @@ +default_language_version: + python: python3.11 +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + hooks: + - id: flake8 diff --git a/poetry.lock b/poetry.lock index 2d2aefc1..a46b420b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "amqp" @@ -131,6 +131,17 @@ files = [ {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "3.3.0" @@ -368,6 +379,33 @@ files = [ [package.extras] toml = ["tomli"] +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." +optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + [[package]] name = "flake8" version = "6.1.0" @@ -419,6 +457,20 @@ files = [ [package.dependencies] requests = "*" +[[package]] +name = "identify" +version = "2.5.33" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.4" @@ -570,6 +622,20 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + [[package]] name = "pep8" version = "1.7.1" @@ -581,6 +647,39 @@ files = [ {file = "pep8-1.7.1.tar.gz", hash = "sha256:fe249b52e20498e59e0b5c5256aa52ee99fc295b26ec9eaa85776ffdb9fe6374"}, ] +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pre-commit" +version = "3.6.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "prompt-toolkit" version = "3.0.39" @@ -703,6 +802,65 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + [[package]] name = "redis" version = "5.0.1" @@ -742,6 +900,22 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "setuptools" +version = "69.0.3" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, + {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" @@ -813,6 
+987,26 @@ files = [ {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, ] +[[package]] +name = "virtualenv" +version = "20.25.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "wcwidth" version = "0.2.8" @@ -844,4 +1038,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "99ae424917bed4065ef3bcbd1e58b0768885e5d888450b134ec1423b28b26ef8" +content-hash = "cea7908906375262704017225710942051e457d222bde5ec6e756b38817059e2" diff --git a/pyproject.toml b/pyproject.toml index 0009ef8f..da0634d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ publicsuffixlist = "^0.10.0.20231002" requests = "^2.31.0" Flask = "^3.0.0" uWSGI = "^2.0.22" +pre-commit = "^3.6.0" [build-system] requires = ["poetry-core"] From 376413493eca8298b8f2c17c8f02fc0d04285136 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 17:38:14 +0000 Subject: [PATCH 05/31] run pre-commit against all files --- .github/workflows/test.yml | 1 - CODE_OF_CONDUCT.md | 4 ++-- httpobs/conf/hsts-preload.json | 2 +- httpobs/database/.dockerignore | 2 +- httpobs/database/Dockerfile | 2 +- httpobs/database/schema.sql | 2 +- httpobs/database/schema.sql.docker.sql | 2 +- httpobs/scripts/httpobs-local-scan | 2 +- 8 files changed, 8 insertions(+), 9 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 81dd9022..644f2fb3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -37,4 +37,3 @@ jobs: run: flake8 --config .flake8 httpobs - name: Run nose tests run: nosetests httpobs/tests -e insert_test_result -e scored_test -e select_test_results -e test_retrieve --with-coverage --cover-package=httpobs - diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 498baa3f..041fbb69 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,8 +1,8 @@ # Community Participation Guidelines -This repository is governed by Mozilla's code of conduct and etiquette guidelines. +This repository is governed by Mozilla's code of conduct and etiquette guidelines. For more details, please read the -[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). +[Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 
## How to Report For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. diff --git a/httpobs/conf/hsts-preload.json b/httpobs/conf/hsts-preload.json index 0bb6d2e3..1c4e3ed8 100644 --- a/httpobs/conf/hsts-preload.json +++ b/httpobs/conf/hsts-preload.json @@ -1066793,4 +1066793,4 @@ "mode": "force-https", "pinned": false } -} \ No newline at end of file +} diff --git a/httpobs/database/.dockerignore b/httpobs/database/.dockerignore index 347179a0..0ebe5ca6 100644 --- a/httpobs/database/.dockerignore +++ b/httpobs/database/.dockerignore @@ -1,3 +1,3 @@ __pycache__ data -.DS_Store \ No newline at end of file +.DS_Store diff --git a/httpobs/database/Dockerfile b/httpobs/database/Dockerfile index d65135be..d8866402 100644 --- a/httpobs/database/Dockerfile +++ b/httpobs/database/Dockerfile @@ -2,4 +2,4 @@ FROM postgres ADD schema.sql /docker-entrypoint-initdb.d/ -ADD schema.sql.docker.sql /docker-entrypoint-initdb.d/ \ No newline at end of file +ADD schema.sql.docker.sql /docker-entrypoint-initdb.d/ diff --git a/httpobs/database/schema.sql b/httpobs/database/schema.sql index b55d3022..846858f5 100644 --- a/httpobs/database/schema.sql +++ b/httpobs/database/schema.sql @@ -175,4 +175,4 @@ ALTER MATERIALIZED VIEW latest_tests OWNER TO httpobsscanner; /* ALTER TABLE scans ADD COLUMN algorithm_version SMALLINT NOT NULL DEFAULT 1; CREATE INDEX scans_algorithm_version_idx ON scans (algorithm_version); -*/ \ No newline at end of file +*/ diff --git a/httpobs/database/schema.sql.docker.sql b/httpobs/database/schema.sql.docker.sql index f6bbbd45..48710b82 100644 --- a/httpobs/database/schema.sql.docker.sql +++ b/httpobs/database/schema.sql.docker.sql @@ -1,4 +1,4 @@ /* silly alphabetical naming requirements */ ALTER ROLE httpobsapi LOGIN PASSWORD 'httpobsapipassword'; -ALTER ROLE httpobsscanner LOGIN PASSWORD 'httpobsscannerpassword'; \ No newline at end of file +ALTER ROLE httpobsscanner LOGIN PASSWORD 'httpobsscannerpassword'; diff --git a/httpobs/scripts/httpobs-local-scan b/httpobs/scripts/httpobs-local-scan index a7163ea3..e44ed1bb 100755 --- a/httpobs/scripts/httpobs-local-scan +++ b/httpobs/scripts/httpobs-local-scan @@ -65,7 +65,7 @@ if __name__ == "__main__": # Because it makes sense this way if args['http_port'] == 80: - del(args['http_port']) + del (args['http_port']) if args['https_port'] == 443: del (args['https_port']) From a35d4a1bf533b3d9d99efcfa50beb5d3c521a3ee Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 17:40:19 +0000 Subject: [PATCH 06/31] add black formatter --- .flake8 | 6 ++-- .pre-commit-config.yaml | 4 +++ poetry.lock | 79 ++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 7 ++++ 4 files changed, 92 insertions(+), 4 deletions(-) diff --git a/.flake8 b/.flake8 index e30b7685..29425516 100644 --- a/.flake8 +++ b/.flake8 @@ -2,9 +2,9 @@ exclude = .flake8 .git -ignore = +extend-ignore = + # black compatibility + E203, E704, # ignore bare excepts until I can get around to fixing them E722, - # line break after binary operator - W504, max-line-length = 120 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d96d8f47..b855a7fa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,6 +8,10 @@ repos: - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 23.12.1 + hooks: + - id: black - repo: 
https://github.com/pycqa/flake8 rev: 6.1.0 hooks: diff --git a/poetry.lock b/poetry.lock index a46b420b..43092451 100644 --- a/poetry.lock +++ b/poetry.lock @@ -54,6 +54,50 @@ files = [ {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, ] +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = 
"black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "blinker" version = "1.6.2" @@ -622,6 +666,17 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "nodeenv" version = "1.8.0" @@ -636,6 +691,28 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + [[package]] name = "pep8" version = "1.7.1" @@ -1038,4 +1115,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "cea7908906375262704017225710942051e457d222bde5ec6e756b38817059e2" +content-hash = "0e91d0f8f44fe626d8a2b25316aa26aaeeb58792af6602245b7f0183c74f8457" diff --git a/pyproject.toml b/pyproject.toml index da0634d3..d0a9ea36 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,14 @@ requests = "^2.31.0" Flask = "^3.0.0" uWSGI = "^2.0.22" pre-commit = "^3.6.0" +black = "^23.12.1" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" + +[tool.black] +target-version = ["py311"] +required-version = "23" +line-length = 120 +skip-string-normalization = true From 1583d515c0a767e2b69b3cdcd785008d2dd0cd0b Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 17:54:53 +0000 Subject: [PATCH 07/31] format all files with black --- httpobs/conf/__init__.py | 93 +- httpobs/database/__init__.py | 34 +- httpobs/database/database.py | 171 ++- httpobs/scanner/analyzer/__init__.py | 18 +- httpobs/scanner/analyzer/content.py | 76 +- httpobs/scanner/analyzer/headers.py | 228 ++-- httpobs/scanner/analyzer/misc.py | 32 +- httpobs/scanner/grader/__init__.py | 24 +- httpobs/scanner/grader/grade.py | 98 +- httpobs/scanner/local.py | 2 +- httpobs/scanner/main.py | 154 ++- httpobs/scanner/retriever/retriever.py | 50 +- httpobs/scanner/tasks.py | 24 +- httpobs/scanner/utils.py | 32 +- httpobs/scripts/httpobs-local-scan | 60 +- httpobs/scripts/httpobs-mass-scan | 7 +- httpobs/tests/unittests/test_content.py | 12 +- httpobs/tests/unittests/test_csp_parser.py | 86 +- httpobs/tests/unittests/test_grades.py | 6 +- httpobs/tests/unittests/test_headers.py | 1035 +++++++++-------- httpobs/tests/unittests/test_misc.py | 46 +- .../test_parse_http_equiv_headers.py | 18 +- httpobs/tests/unittests/test_retriever.py | 2 +- .../tests/unittests/test_sanitize_headers.py | 10 +- httpobs/website/__init__.py | 3 +- httpobs/website/api.py | 72 +- httpobs/website/decorators.py | 52 +- httpobs/website/main.py | 3 +- httpobs/website/monitoring.py | 3 +- setup.py | 10 +- 30 files changed, 1342 insertions(+), 1119 deletions(-) diff --git a/httpobs/conf/__init__.py b/httpobs/conf/__init__.py index 72a183df..2401c9a8 100644 --- a/httpobs/conf/__init__.py +++ b/httpobs/conf/__init__.py @@ -8,8 +8,8 @@ # Read in the default config file if /etc/httpobs.conf doesn't already exist __dirname = os.path.abspath(os.path.dirname(__file__)) _config_parser = configparser.ConfigParser() -_config_parser.read_file(open(os.path.join(__dirname, 'httpobs.conf'))) # default values -_config_parser.read(['/etc/httpobs.conf', os.path.expanduser('~/.httpobs.conf')]) # overridden values +_config_parser.read_file(open(os.path.join(__dirname, 'httpobs.conf'))) # default values +_config_parser.read(['/etc/httpobs.conf', os.path.expanduser('~/.httpobs.conf')]) # overridden values # Return None if it's not in the config parser @@ -38,17 +38,21 @@ def __conf(section, param, type=None, default=None): DEVELOPMENT_MODE = True if environ.get('HTTPOBS_DEV') == 'yes' else False or __conf('global', 'development', bool) # API configuration 
-API_ALLOW_VERBOSE_STATS_FROM_PUBLIC = (environ.get('HTTPOBS_ALLOW_VERBOSE_STATS_FROM_PUBLIC') == 'yes' or - __conf('api', 'allow_verbose_stats_from_public', bool, True)) +API_ALLOW_VERBOSE_STATS_FROM_PUBLIC = environ.get('HTTPOBS_ALLOW_VERBOSE_STATS_FROM_PUBLIC') == 'yes' or __conf( + 'api', 'allow_verbose_stats_from_public', bool, True +) API_CACHED_RESULT_TIME = int(environ.get('HTTPOBS_API_CACHED_RESULT_TIME') or __conf('api', 'cached_result_time')) API_COOLDOWN = int(environ.get('HTTPOBS_API_COOLDOWN') or __conf('api', 'cooldown', int)) API_PORT = int(environ.get('HTTPOBS_API_PORT') or __conf('api', 'port', int)) -API_PROPAGATE_EXCEPTIONS = (True if environ.get('HTTPOBS_PROPAGATE_EXCEPTIONS') == 'yes' else False or - __conf('api', 'propagate_exceptions', bool)) +API_PROPAGATE_EXCEPTIONS = ( + True + if environ.get('HTTPOBS_PROPAGATE_EXCEPTIONS') == 'yes' + else False or __conf('api', 'propagate_exceptions', bool) +) API_URL = environ.get('HTTPOBS_API_URL') or __conf('api', 'url') # Broker configuration -BROKER_URL = (environ.get('HTTPOBS_BROKER_URL') or __conf('scanner', 'broker')) +BROKER_URL = environ.get('HTTPOBS_BROKER_URL') or __conf('scanner', 'broker') # Database configuration DATABASE_DB = environ.get('HTTPOBS_DATABASE_DB') or __conf('database', 'database') @@ -66,38 +70,53 @@ def __conf(section, param, type=None, default=None): DATABASE_SSL_MODE = 'prefer' # Retriever parameters -RETRIEVER_CONNECT_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_CONNECT_TIMEOUT') or - __conf('retriever', 'connect_timeout')) -RETRIEVER_READ_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_READ_TIMEOUT') or - __conf('retriever', 'read_timeout')) +RETRIEVER_CONNECT_TIMEOUT = float( + environ.get('HTTPOBS_RETRIEVER_CONNECT_TIMEOUT') or __conf('retriever', 'connect_timeout') +) +RETRIEVER_READ_TIMEOUT = float(environ.get('HTTPOBS_RETRIEVER_READ_TIMEOUT') or __conf('retriever', 'read_timeout')) RETRIEVER_USER_AGENT = environ.get('HTTPOBS_RETRIEVER_USER_AGENT') or __conf('retriever', 'user_agent') RETRIEVER_CORS_ORIGIN = environ.get('HTTPOBS_RETRIEVER_CORS_ORIGIN') or __conf('retriever', 'cors_origin') # Scanner configuration -SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or - __conf('scanner', 'abort_scan_time')) -SCANNER_ALLOW_KICKSTART = (environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART') == 'yes' or - __conf('scanner', 'allow_kickstart', bool)) -SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int(environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or - __conf('scanner', 'allow_kickstart_num_aborted')) -SCANNER_ALLOW_LOCALHOST = (environ.get('HTTPOBS_SCANNER_ALLOW_LOCALHOST') == 'yes' or - __conf('scanner', 'allow_localhost', bool)) -SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or - __conf('scanner', 'broker_reconnection_sleep_time')) -SCANNER_CYCLE_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_CYCLE_SLEEP_TIME') or - __conf('scanner', 'cycle_sleep_time')) -SCANNER_DATABASE_RECONNECTION_SLEEP_TIME = float(environ.get('HTTPOBS_SCANNER_DATABASE_RECONNECTION_SLEEP_TIME') or - __conf('scanner', 'database_reconnection_sleep_time')) -SCANNER_MAINTENANCE_CYCLE_FREQUENCY = int(environ.get('HTTPOBS_MAINTENANCE_CYCLE_FREQUENCY') or - __conf('scanner', 'maintenance_cycle_frequency')) -SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY = int(environ.get('HTTPOBS_SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY') or - __conf('scanner', 'materialized_view_refresh_frequency')) -SCANNER_MAX_CPU_UTILIZATION = 
int(environ.get('HTTPOBS_SCANNER_MAX_CPU_UTILIZATION') or - __conf('scanner', 'max_cpu_utilization')) -SCANNER_MAX_LOAD_RATIO = int(environ.get('HTTPOBS_SCANNER_MAX_LOAD_RATIO_PER_CPU') or - __conf('scanner', 'max_load_ratio_per_cpu')) +SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or __conf('scanner', 'abort_scan_time')) +SCANNER_ALLOW_KICKSTART = environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART') == 'yes' or __conf( + 'scanner', 'allow_kickstart', bool +) +SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int( + environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or __conf('scanner', 'allow_kickstart_num_aborted') +) +SCANNER_ALLOW_LOCALHOST = environ.get('HTTPOBS_SCANNER_ALLOW_LOCALHOST') == 'yes' or __conf( + 'scanner', 'allow_localhost', bool +) +SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float( + environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or __conf('scanner', 'broker_reconnection_sleep_time') +) +SCANNER_CYCLE_SLEEP_TIME = float( + environ.get('HTTPOBS_SCANNER_CYCLE_SLEEP_TIME') or __conf('scanner', 'cycle_sleep_time') +) +SCANNER_DATABASE_RECONNECTION_SLEEP_TIME = float( + environ.get('HTTPOBS_SCANNER_DATABASE_RECONNECTION_SLEEP_TIME') + or __conf('scanner', 'database_reconnection_sleep_time') +) +SCANNER_MAINTENANCE_CYCLE_FREQUENCY = int( + environ.get('HTTPOBS_MAINTENANCE_CYCLE_FREQUENCY') or __conf('scanner', 'maintenance_cycle_frequency') +) +SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY = int( + environ.get('HTTPOBS_SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY') + or __conf('scanner', 'materialized_view_refresh_frequency') +) +SCANNER_MAX_CPU_UTILIZATION = int( + environ.get('HTTPOBS_SCANNER_MAX_CPU_UTILIZATION') or __conf('scanner', 'max_cpu_utilization') +) +SCANNER_MAX_LOAD_RATIO = int( + environ.get('HTTPOBS_SCANNER_MAX_LOAD_RATIO_PER_CPU') or __conf('scanner', 'max_load_ratio_per_cpu') +) SCANNER_MAX_LOAD = cpu_count() * SCANNER_MAX_LOAD_RATIO -SCANNER_MOZILLA_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or - __conf('scanner', 'mozilla_domains')).split(',')] -SCANNER_PINNED_DOMAINS = [domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_PINNED_DOMAINS') or - __conf('scanner', 'pinned_domains')).split(',')] +SCANNER_MOZILLA_DOMAINS = [ + domain.strip() + for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or __conf('scanner', 'mozilla_domains')).split(',') +] +SCANNER_PINNED_DOMAINS = [ + domain.strip() + for domain in (environ.get('HTTPOBS_SCANNER_PINNED_DOMAINS') or __conf('scanner', 'pinned_domains')).split(',') +] diff --git a/httpobs/database/__init__.py b/httpobs/database/__init__.py index 5318897d..b93a9df6 100644 --- a/httpobs/database/__init__.py +++ b/httpobs/database/__init__.py @@ -1,19 +1,21 @@ -from .database import (get_cursor, - insert_scan, - insert_scan_grade, - insert_test_results, - periodic_maintenance, - refresh_materialized_views, - select_scan_host_history, - select_scan_recent_finished_scans, - select_scan_recent_scan, - select_scan_scanner_statistics, - select_site_headers, - select_site_id, - select_star_from, - select_test_results, - update_scan_state, - update_scans_dequeue_scans) +from .database import ( + get_cursor, + insert_scan, + insert_scan_grade, + insert_test_results, + periodic_maintenance, + refresh_materialized_views, + select_scan_host_history, + select_scan_recent_finished_scans, + select_scan_recent_scan, + select_scan_scanner_statistics, + select_site_headers, + select_site_id, + select_star_from, + select_test_results, + 
update_scan_state, + update_scans_dequeue_scans, +) __all__ = [ 'get_cursor', diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 65070f7c..5b418018 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -3,21 +3,25 @@ from types import SimpleNamespace from os import getpid -from httpobs.conf import (API_CACHED_RESULT_TIME, - DATABASE_CA_CERT, - DATABASE_DB, - DATABASE_HOST, - DATABASE_PASSWORD, - DATABASE_PORT, - DATABASE_SSL_MODE, - DATABASE_USER, - SCANNER_ABORT_SCAN_TIME) -from httpobs.scanner import (ALGORITHM_VERSION, - STATE_ABORTED, - STATE_FAILED, - STATE_FINISHED, - STATE_PENDING, - STATE_STARTING) +from httpobs.conf import ( + API_CACHED_RESULT_TIME, + DATABASE_CA_CERT, + DATABASE_DB, + DATABASE_HOST, + DATABASE_PASSWORD, + DATABASE_PORT, + DATABASE_SSL_MODE, + DATABASE_USER, + SCANNER_ABORT_SCAN_TIME, +) +from httpobs.scanner import ( + ALGORITHM_VERSION, + STATE_ABORTED, + STATE_FAILED, + STATE_FINISHED, + STATE_PENDING, + STATE_STARTING, +) from httpobs.scanner.analyzer import NUM_TESTS from httpobs.scanner.grader import get_grade_and_likelihood_for_score, MINIMUM_SCORE_FOR_EXTRA_CREDIT @@ -35,13 +39,15 @@ def __init__(self): def _connect(self): try: - self._conn = psycopg2.connect(database=DATABASE_DB, - host=DATABASE_HOST, - password=DATABASE_PASSWORD, - port=DATABASE_PORT, - sslmode=DATABASE_SSL_MODE, - sslrootcert=DATABASE_CA_CERT, - user=DATABASE_USER) + self._conn = psycopg2.connect( + database=DATABASE_DB, + host=DATABASE_HOST, + password=DATABASE_PASSWORD, + port=DATABASE_PORT, + sslmode=DATABASE_SSL_MODE, + sslrootcert=DATABASE_CA_CERT, + user=DATABASE_USER, + ) if not self._connected: print('INFO: Connected to PostgreSQL', file=sys.stderr) @@ -99,32 +105,34 @@ def get_cursor(): def insert_scan(site_id: int, hidden: bool = False) -> dict: with get_cursor() as cur: - cur.execute("""INSERT INTO scans (site_id, state, start_time, algorithm_version, tests_quantity, hidden) + cur.execute( + """INSERT INTO scans (site_id, state, start_time, algorithm_version, tests_quantity, hidden) VALUES (%s, %s, NOW(), %s, %s, %s) RETURNING *""", - (site_id, STATE_PENDING, ALGORITHM_VERSION, NUM_TESTS, hidden)) + (site_id, STATE_PENDING, ALGORITHM_VERSION, NUM_TESTS, hidden), + ) return dict(cur.fetchone()) def insert_scan_grade(scan_id, scan_grade, scan_score) -> dict: with get_cursor() as cur: - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET (grade, score) = (%s, %s) WHERE id = %s RETURNING *""", - (scan_grade, scan_score, scan_id)) + (scan_grade, scan_score, scan_id), + ) return dict(cur.fetchone()) # TODO: Separate out some of this logic so it doesn't need to be duplicated in local.scan() -def insert_test_results(site_id: int, - scan_id: int, - tests: list, - response_headers: dict, - status_code: int = None) -> dict: +def insert_test_results( + site_id: int, scan_id: int, tests: list, response_headers: dict, status_code: int = None +) -> dict: with get_cursor() as cur: tests_failed = tests_passed = 0 score_with_extra_credit = uncurved_score = 100 @@ -148,9 +156,11 @@ def insert_test_results(site_id: int, uncurved_score += score_modifier # Insert test result to the database - cur.execute("""INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier) + cur.execute( + """INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)""", - (site_id, scan_id, name, expectation, result, passed, dumps(test), 
score_modifier)) + (site_id, scan_id, name, expectation, result, passed, dumps(test), score_modifier), + ) # Only record the full score if the uncurved score already receives an A score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score @@ -159,14 +169,25 @@ def insert_test_results(site_id: int, score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score) # Update the scans table - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET (end_time, tests_failed, tests_passed, grade, score, likelihood_indicator, state, response_headers, status_code) = (NOW(), %s, %s, %s, %s, %s, %s, %s, %s) WHERE id = %s RETURNING *""", - (tests_failed, tests_passed, grade, score, likelihood_indicator, STATE_FINISHED, - dumps(response_headers), status_code, scan_id)) + ( + tests_failed, + tests_passed, + grade, + score, + likelihood_indicator, + STATE_FINISHED, + dumps(response_headers), + status_code, + scan_id, + ), + ) row = dict(cur.fetchone()) @@ -181,13 +202,15 @@ def periodic_maintenance() -> int: """ with get_cursor() as cur: # Mark all scans that have been sitting unfinished for at least SCANNER_ABORT_SCAN_TIME as ABORTED - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET (state, end_time) = (%s, NOW()) WHERE state != %s AND state != %s AND state != %s AND start_time < NOW() - INTERVAL '%s seconds';""", - (STATE_ABORTED, STATE_ABORTED, STATE_FAILED, STATE_FINISHED, SCANNER_ABORT_SCAN_TIME)) + (STATE_ABORTED, STATE_ABORTED, STATE_FAILED, STATE_FINISHED, SCANNER_ABORT_SCAN_TIME), + ) return cur.rowcount @@ -220,11 +243,13 @@ def select_star_from(table: str) -> dict: def select_scan_host_history(site_id: int) -> list: # Get all of the site's historic scans with get_cursor() as cur: - cur.execute("""SELECT id, grade, score, end_time FROM scans + cur.execute( + """SELECT id, grade, score, end_time FROM scans WHERE site_id = %s AND state = %s ORDER BY end_time ASC;""", - (site_id, STATE_FINISHED)) + (site_id, STATE_FINISHED), + ) if cur.rowcount > 0: return [ @@ -233,8 +258,10 @@ def select_scan_host_history(site_id: int) -> list: 'grade': row['grade'], 'score': row['score'], 'end_time': row['end_time'], - 'end_time_unix_timestamp': int(row['end_time'].timestamp()) - } for row in cur.fetchall()] + 'end_time_unix_timestamp': int(row['end_time'].timestamp()), + } + for row in cur.fetchall() + ] else: return [] @@ -265,13 +292,15 @@ def select_scan_scanner_statistics(verbose: bool = False) -> dict: states = dict(cur.fetchall()) # Get the recent scan count - cur.execute("""SELECT DATE_TRUNC('hour', end_time) AS hour, COUNT(*) as num_scans + cur.execute( + """SELECT DATE_TRUNC('hour', end_time) AS hour, COUNT(*) as num_scans FROM scans WHERE (end_time < DATE_TRUNC('hour', NOW())) AND (end_time >= DATE_TRUNC('hour', NOW()) - INTERVAL '24 hours') GROUP BY hour ORDER BY hour DESC;""", - (STATE_FINISHED,)) + (STATE_FINISHED,), + ) recent_scans = dict(cur.fetchall()).items() else: recent_scans = {} @@ -292,7 +321,8 @@ def select_scan_recent_finished_scans(num_scans=10, min_score=0, max_score=100) # Used for /api/v1/getRecentScans # Fix from: https://gist.github.com/april/61efa9ff197828bf5ab13e5a00be9138 with get_cursor() as cur: - cur.execute("""SELECT sites.domain, s2.grade + cur.execute( + """SELECT sites.domain, s2.grade FROM (SELECT DISTINCT ON (s1.site_id) s1.site_id, s1.grade, s1.end_time FROM @@ -307,19 +337,22 @@ def select_scan_recent_finished_scans(num_scans=10, min_score=0, max_score=100) ORDER BY s1.site_id, 
s1.end_time DESC) s2 INNER JOIN sites ON (sites.id = s2.site_id) ORDER BY s2.end_time DESC LIMIT %s;""", - (STATE_FINISHED, min_score, max_score, num_scans * 2, num_scans)) + (STATE_FINISHED, min_score, max_score, num_scans * 2, num_scans), + ) return dict(cur.fetchall()) def select_scan_recent_scan(site_id: int, recent_in_seconds=API_CACHED_RESULT_TIME) -> dict: with get_cursor() as cur: - cur.execute("""SELECT * FROM scans + cur.execute( + """SELECT * FROM scans WHERE site_id = %s AND start_time >= NOW() - INTERVAL '%s seconds' ORDER BY start_time DESC LIMIT 1""", - (site_id, recent_in_seconds)) + (site_id, recent_in_seconds), + ) if cur.rowcount > 0: return dict(cur.fetchone()) @@ -330,11 +363,13 @@ def select_scan_recent_scan(site_id: int, recent_in_seconds=API_CACHED_RESULT_TI def select_site_headers(hostname: str) -> dict: # Return the site's headers with get_cursor() as cur: - cur.execute("""SELECT public_headers, private_headers, cookies FROM sites + cur.execute( + """SELECT public_headers, private_headers, cookies FROM sites WHERE domain = %s ORDER BY creation_time DESC LIMIT 1""", - (hostname,)) + (hostname,), + ) # If it has headers, merge the public and private headers together if cur.rowcount > 0: @@ -344,10 +379,7 @@ def select_site_headers(hostname: str) -> dict: private_headers = {} if row.get('private_headers') is None else row.get('private_headers') headers.update(private_headers) - return { - 'cookies': {} if row.get('cookies') is None else row.get('cookies'), - 'headers': headers - } + return {'cookies': {} if row.get('cookies') is None else row.get('cookies'), 'headers': headers} else: return {} @@ -355,20 +387,25 @@ def select_site_headers(hostname: str) -> dict: def select_site_id(hostname: str) -> int: # See if the site exists already with get_cursor() as cur: - cur.execute("""SELECT id FROM sites + cur.execute( + """SELECT id FROM sites WHERE domain = %s ORDER BY creation_time DESC LIMIT 1""", - (hostname,)) + (hostname,), + ) if cur.rowcount > 0: return cur.fetchone()['id'] # If not, let's create the site with get_cursor() as cur: - cur.execute("""INSERT INTO sites (domain, creation_time) + cur.execute( + """INSERT INTO sites (domain, creation_time) VALUES (%s, NOW()) - RETURNING id""", (hostname,)) + RETURNING id""", + (hostname,), + ) return cur.fetchone()['id'] @@ -390,21 +427,25 @@ def select_test_results(scan_id: int) -> dict: def update_scan_state(scan_id, state: str, error=None) -> dict: if error: with get_cursor() as cur: - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET (state, end_time, error) = (%s, NOW(), %s) WHERE id = %s RETURNING *""", - (state, error, scan_id)) + (state, error, scan_id), + ) row = dict(cur.fetchone()) else: with get_cursor() as cur: - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET state = %s WHERE id = %s RETURNING *""", - (state, scan_id)) + (state, scan_id), + ) row = dict(cur.fetchone()) @@ -413,7 +454,8 @@ def update_scan_state(scan_id, state: str, error=None) -> dict: def update_scans_dequeue_scans(num_to_dequeue: int = 0) -> dict: with get_cursor() as cur: - cur.execute("""UPDATE scans + cur.execute( + """UPDATE scans SET state = %s FROM ( SELECT sites.domain, scans.site_id, scans.id AS scan_id, scans.state @@ -424,6 +466,7 @@ def update_scans_dequeue_scans(num_to_dequeue: int = 0) -> dict: FOR UPDATE) sub WHERE scans.id = sub.scan_id RETURNING sub.domain, sub.site_id, sub.scan_id""", - (STATE_STARTING, STATE_PENDING, num_to_dequeue)) + (STATE_STARTING, STATE_PENDING, num_to_dequeue), + ) 
return cur.fetchall() diff --git a/httpobs/scanner/analyzer/__init__.py b/httpobs/scanner/analyzer/__init__.py index 2c549427..d3604b92 100644 --- a/httpobs/scanner/analyzer/__init__.py +++ b/httpobs/scanner/analyzer/__init__.py @@ -1,13 +1,17 @@ from .content import contribute, subresource_integrity -from .headers import (content_security_policy, cookies, public_key_pinning, referrer_policy, strict_transport_security, - x_content_type_options, x_xss_protection, x_frame_options) +from .headers import ( + content_security_policy, + cookies, + public_key_pinning, + referrer_policy, + strict_transport_security, + x_content_type_options, + x_xss_protection, + x_frame_options, +) from .misc import cross_origin_resource_sharing, redirection -__all__ = [ - 'NUM_TESTS', - 'tests', - 'TEST_NAMES' -] +__all__ = ['NUM_TESTS', 'tests', 'TEST_NAMES'] tests = ( content_security_policy, diff --git a/httpobs/scanner/analyzer/content.py b/httpobs/scanner/analyzer/content.py index a8db7bef..2dfd5592 100644 --- a/httpobs/scanner/analyzer/content.py +++ b/httpobs/scanner/analyzer/content.py @@ -124,12 +124,14 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external- response = reqs['responses']['auto'] # The order of how "good" the results are - goodness = ['sri-implemented-and-all-scripts-loaded-securely', - 'sri-implemented-and-external-scripts-loaded-securely', - 'sri-implemented-but-external-scripts-not-loaded-securely', - 'sri-not-implemented-but-external-scripts-loaded-securely', - 'sri-not-implemented-and-external-scripts-not-loaded-securely', - 'sri-not-implemented-response-not-html'] + goodness = [ + 'sri-implemented-and-all-scripts-loaded-securely', + 'sri-implemented-and-external-scripts-loaded-securely', + 'sri-implemented-but-external-scripts-not-loaded-securely', + 'sri-not-implemented-but-external-scripts-loaded-securely', + 'sri-not-implemented-and-external-scripts-not-loaded-securely', + 'sri-not-implemented-response-not-html', + ] # If the content isn't HTML, there's no scripts to load; this is okay if response.headers.get('Content-Type', '').split(';')[0] not in HTML_TYPES: @@ -158,8 +160,11 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external- # Check to see if they're on the same second-level domain # TODO: update the PSL list on startup psl = PublicSuffixList() - samesld = True if (psl.privatesuffix(urlparse(response.url).netloc) == - psl.privatesuffix(src.netloc)) else False + samesld = ( + True + if (psl.privatesuffix(urlparse(response.url).netloc) == psl.privatesuffix(src.netloc)) + else False + ) if src.scheme == '': if src.netloc == '': @@ -189,29 +194,28 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external- # Add it to the scripts data result, if it's not a relative URI if not secureorigin: - output['data'][script['src']] = { - 'crossorigin': crossorigin, - 'integrity': integrity - } + output['data'][script['src']] = {'crossorigin': crossorigin, 'integrity': integrity} if integrity and not securescheme: - output['result'] = only_if_worse('sri-implemented-but-external-scripts-not-loaded-securely', - output['result'], - goodness) + output['result'] = only_if_worse( + 'sri-implemented-but-external-scripts-not-loaded-securely', output['result'], goodness + ) elif not integrity and securescheme: - output['result'] = only_if_worse('sri-not-implemented-but-external-scripts-loaded-securely', - output['result'], - goodness) + output['result'] = only_if_worse( + 
'sri-not-implemented-but-external-scripts-loaded-securely', output['result'], goodness + ) elif not integrity and not securescheme and samesld: - output['result'] = only_if_worse('sri-not-implemented-and-external-scripts' - '-not-loaded-securely', - output['result'], - goodness) + output['result'] = only_if_worse( + 'sri-not-implemented-and-external-scripts' '-not-loaded-securely', + output['result'], + goodness, + ) elif not integrity and not securescheme: - output['result'] = only_if_worse('sri-not-implemented-and-external-scripts' - '-not-loaded-securely', - output['result'], - goodness) + output['result'] = only_if_worse( + 'sri-not-implemented-and-external-scripts' '-not-loaded-securely', + output['result'], + goodness, + ) # Grant bonus even if they use SRI on the same origin else: @@ -228,20 +232,22 @@ def subresource_integrity(reqs: dict, expectation='sri-implemented-and-external- # If the page loaded from a foreign origin, but everything included SRI elif scripts and scripts_on_foreign_origin and not output['result']: - output['result'] = only_if_worse('sri-implemented-and-external-scripts-loaded-securely', - output['result'], - goodness) + output['result'] = only_if_worse( + 'sri-implemented-and-external-scripts-loaded-securely', output['result'], goodness + ) # Code defensively on the size of the data output['data'] = output['data'] if len(str(output['data'])) < 32768 else {} # Check to see if the test passed or failed - if output['result'] in ('sri-implemented-and-all-scripts-loaded-securely', - 'sri-implemented-and-external-scripts-loaded-securely', - 'sri-not-implemented-response-not-html', - 'sri-not-implemented-but-all-scripts-loaded-from-secure-origin', - 'sri-not-implemented-but-no-scripts-loaded', - expectation): + if output['result'] in ( + 'sri-implemented-and-all-scripts-loaded-securely', + 'sri-implemented-and-external-scripts-loaded-securely', + 'sri-not-implemented-response-not-html', + 'sri-not-implemented-but-all-scripts-loaded-from-secure-origin', + 'sri-not-implemented-but-no-scripts-loaded', + expectation, + ): output['pass'] = True return output diff --git a/httpobs/scanner/analyzer/headers.py b/httpobs/scanner/analyzer/headers.py index a6b8cdee..bf9658a3 100644 --- a/httpobs/scanner/analyzer/headers.py +++ b/httpobs/scanner/analyzer/headers.py @@ -78,23 +78,29 @@ def __parse_csp(csp_strings: list) -> Dict[str, Set]: # we have to do this to make the domain lowercase for comparisons later url = urlparse(source) url = url._replace(netloc=url.netloc.lower()) - values.append({ - 'source': urlunparse(url), - 'index': policy_index, - 'keep': True if policy_index == 0 else False, - }) + values.append( + { + 'source': urlunparse(url), + 'index': policy_index, + 'keep': True if policy_index == 0 else False, + } + ) else: - values.append({ - 'source': source.lower(), - 'index': policy_index, - 'keep': True if policy_index == 0 else False, - }) + values.append( + { + 'source': source.lower(), + 'index': policy_index, + 'keep': True if policy_index == 0 else False, + } + ) elif len(entry) == 1 and directive.endswith("-src"): # if it's a source list with no values, it's 'none' - values = [{ - 'source': "'none'", - 'index': policy_index, - 'keep': True if policy_index == 0 else False, - }] + values = [ + { + 'source': "'none'", + 'index': policy_index, + 'keep': True if policy_index == 0 else False, + } + ] else: values = [] @@ -167,8 +173,8 @@ def content_security_policy(reqs: dict, expectation='csp-implemented-with-no-uns output = { 'data': None, 'expectation': 
expectation, - 'http': False, # whether an HTTP header was available - 'meta': False, # whether an HTTP meta-equiv was available + 'http': False, # whether an HTTP header was available + 'meta': False, # whether an HTTP meta-equiv was available 'pass': False, 'policy': None, 'result': None, @@ -251,73 +257,79 @@ def content_security_policy(reqs: dict, expectation='csp-implemented-with-no-uns # 3. Remove 'self' and 'unsafe-inline' if any(source.startswith(NONCES_HASHES) for source in script_src) and '\'strict-dynamic\'' in script_src: for source in set(script_src): - if (source.startswith(DANGEROUSLY_BROAD) or - source == '\'self\'' or - source == '\'unsafe-inline\''): + if source.startswith(DANGEROUSLY_BROAD) or source == '\'self\'' or source == '\'unsafe-inline\'': script_src.remove(source) output['policy']['strictDynamic'] = True # 'strict-dynamic' in script-src without hash or nonce elif '\'strict-dynamic\'' in script_src: - output['result'] = ('csp-header-invalid' if output['result'] is None - else output['result']) + output['result'] = 'csp-header-invalid' if output['result'] is None else output['result'] # Some checks look only at active/passive CSP directives # This could be inlined, but the code is quite hard to read at that point - active_csp_sources = [source for directive, source_list in csp.items() for source in source_list if - directive not in PASSIVE_DIRECTIVES and directive not in 'script-src'] + list(script_src) - passive_csp_sources = [source for source_list in - [csp.get(directive, csp.get('default-src', [])) for directive in PASSIVE_DIRECTIVES] - for source in source_list] + active_csp_sources = [ + source + for directive, source_list in csp.items() + for source in source_list + if directive not in PASSIVE_DIRECTIVES and directive not in 'script-src' + ] + list(script_src) + passive_csp_sources = [ + source + for source_list in [csp.get(directive, csp.get('default-src', [])) for directive in PASSIVE_DIRECTIVES] + for source in source_list + ] # No 'unsafe-inline' or data: in script-src # Also don't allow overly broad schemes such as https: in either object-src or script-src # Likewise, if you don't have object-src or script-src defined, then all sources are allowed - if (script_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE) or - object_src.intersection(DANGEROUSLY_BROAD)): - output['result'] = ('csp-implemented-with-unsafe-inline' if output['result'] is None - else output['result']) + if script_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE) or object_src.intersection(DANGEROUSLY_BROAD): + output['result'] = 'csp-implemented-with-unsafe-inline' if output['result'] is None else output['result'] output['policy']['unsafeInline'] = True # If the site is https, it shouldn't allow any http: as a source (active content) - if (urlparse(response.url).scheme == 'https' and - [source for source in active_csp_sources if 'http:' in source or 'ftp:' in source] and - not output['policy']['strictDynamic']): - output['result'] = ('csp-implemented-with-insecure-scheme' if output['result'] is None - else output['result']) + if ( + urlparse(response.url).scheme == 'https' + and [source for source in active_csp_sources if 'http:' in source or 'ftp:' in source] + and not output['policy']['strictDynamic'] + ): + output['result'] = 'csp-implemented-with-insecure-scheme' if output['result'] is None else output['result'] output['policy']['insecureSchemeActive'] = True # Don't allow 'unsafe-eval' in script-src or style-src if script_src.union(style_src).intersection({'\'unsafe-eval\''}): 
- output['result'] = ('csp-implemented-with-unsafe-eval' if output['result'] is None - else output['result']) + output['result'] = 'csp-implemented-with-unsafe-eval' if output['result'] is None else output['result'] output['policy']['unsafeEval'] = True # If the site is https, it shouldn't allow any http: as a source (passive content) - if (urlparse(response.url).scheme == 'https' and - [source for source in passive_csp_sources if 'http:' in source or 'ftp:' in source]): - output['result'] = ('csp-implemented-with-insecure-scheme-in-passive-content-only' if output['result'] is None - else output['result']) + if urlparse(response.url).scheme == 'https' and [ + source for source in passive_csp_sources if 'http:' in source or 'ftp:' in source + ]: + output['result'] = ( + 'csp-implemented-with-insecure-scheme-in-passive-content-only' + if output['result'] is None + else output['result'] + ) output['policy']['insecureSchemePassive'] = True # Don't allow 'unsafe-inline', data:, or overly broad sources in style-src if style_src.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE): - output['result'] = ('csp-implemented-with-unsafe-inline-in-style-src-only' if output['result'] is None - else output['result']) + output['result'] = ( + 'csp-implemented-with-unsafe-inline-in-style-src-only' if output['result'] is None else output['result'] + ) output['policy']['unsafeInlineStyle'] = True # Only if default-src is 'none' and 'none' alone, since additional uris override 'none' if csp.get('default-src') == {'\'none\''}: - output['result'] = ('csp-implemented-with-no-unsafe-default-src-none' if output['result'] is None - else output['result']) + output['result'] = ( + 'csp-implemented-with-no-unsafe-default-src-none' if output['result'] is None else output['result'] + ) output['policy']['defaultNone'] = True else: - output['result'] = ('csp-implemented-with-no-unsafe' if output['result'] is None - else output['result']) + output['result'] = 'csp-implemented-with-no-unsafe' if output['result'] is None else output['result'] # Some other checks for the CSP analyzer - output['policy']['antiClickjacking'] = (not bool(frame_ancestors.intersection(DANGEROUSLY_BROAD))) + output['policy']['antiClickjacking'] = not bool(frame_ancestors.intersection(DANGEROUSLY_BROAD)) output['policy']['insecureBaseUri'] = bool(base_uri.intersection(DANGEROUSLY_BROAD + UNSAFE_INLINE)) - output['policy']['insecureFormAction'] = (bool(form_action.intersection(DANGEROUSLY_BROAD))) + output['policy']['insecureFormAction'] = bool(form_action.intersection(DANGEROUSLY_BROAD)) output['policy']['unsafeObjects'] = bool(object_src.intersection(DANGEROUSLY_BROAD)) # Once we're done, convert every set() in csp to an array @@ -329,10 +341,12 @@ def content_security_policy(reqs: dict, expectation='csp-implemented-with-no-uns output['data'] = csp if len(str(csp)) < 32768 else {} # Check to see if the test passed or failed - if output['result'] in (expectation, - 'csp-implemented-with-no-unsafe-default-src-none', - 'csp-implemented-with-unsafe-inline-in-style-src-only', - 'csp-implemented-with-insecure-scheme-in-passive-content-only'): + if output['result'] in ( + expectation, + 'csp-implemented-with-no-unsafe-default-src-none', + 'csp-implemented-with-unsafe-inline-in-style-src-only', + 'csp-implemented-with-insecure-scheme-in-passive-content-only', + ): output['pass'] = True return output @@ -374,13 +388,15 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') -> session = reqs['session'] # all requests and their associated 
cookies # The order of how bad the various results are - goodness = ['cookies-without-secure-flag-but-protected-by-hsts', - 'cookies-without-secure-flag', - 'cookies-session-without-secure-flag-but-protected-by-hsts', - 'cookies-samesite-flag-invalid', - 'cookies-anticsrf-without-samesite-flag', - 'cookies-session-without-httponly-flag', - 'cookies-session-without-secure-flag'] + goodness = [ + 'cookies-without-secure-flag-but-protected-by-hsts', + 'cookies-without-secure-flag', + 'cookies-session-without-secure-flag-but-protected-by-hsts', + 'cookies-samesite-flag-invalid', + 'cookies-anticsrf-without-samesite-flag', + 'cookies-session-without-httponly-flag', + 'cookies-session-without-secure-flag', + ] # TODO: Support cookies set over http-equiv (ugh) # https://github.com/mozilla/http-observatory/issues/265 @@ -415,54 +431,44 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') -> elif samesiteVal.strip().lower() == 'none': cookie.samesite = 'None' else: - output['result'] = only_if_worse('cookies-samesite-flag-invalid', - output['result'], - goodness) + output['result'] = only_if_worse('cookies-samesite-flag-invalid', output['result'], goodness) # Add it to the jar - jar[cookie.name] = {i: getattr(cookie, i, None) for i in ['domain', 'expires', 'httponly', - 'max-age', 'path', 'port', 'samesite', 'secure']} + jar[cookie.name] = { + i: getattr(cookie, i, None) + for i in ['domain', 'expires', 'httponly', 'max-age', 'path', 'port', 'samesite', 'secure'] + } # Is it a session identifier or an anti-csrf token? sessionid = any(i in cookie.name.lower() for i in ('login', 'sess')) anticsrf = True if 'csrf' in cookie.name.lower() else False if not cookie.secure and cookie.samesite == 'None': - output['result'] = only_if_worse('cookies-samesite-flag-invalid', - output['result'], - goodness) + output['result'] = only_if_worse('cookies-samesite-flag-invalid', output['result'], goodness) if not cookie.secure and hsts: - output['result'] = only_if_worse('cookies-without-secure-flag-but-protected-by-hsts', - output['result'], - goodness) + output['result'] = only_if_worse( + 'cookies-without-secure-flag-but-protected-by-hsts', output['result'], goodness + ) elif not cookie.secure: - output['result'] = only_if_worse('cookies-without-secure-flag', - output['result'], - goodness) + output['result'] = only_if_worse('cookies-without-secure-flag', output['result'], goodness) # Anti-CSRF tokens should be set using the SameSite option if anticsrf and not cookie.samesite: - output['result'] = only_if_worse('cookies-anticsrf-without-samesite-flag', - output['result'], - goodness) + output['result'] = only_if_worse('cookies-anticsrf-without-samesite-flag', output['result'], goodness) # Login and session cookies should be set with Secure if sessionid and not cookie.secure and hsts: - output['result'] = only_if_worse('cookies-session-without-secure-flag-but-protected-by-hsts', - output['result'], - goodness) + output['result'] = only_if_worse( + 'cookies-session-without-secure-flag-but-protected-by-hsts', output['result'], goodness + ) elif sessionid and not cookie.secure: - output['result'] = only_if_worse('cookies-session-without-secure-flag', - output['result'], - goodness) + output['result'] = only_if_worse('cookies-session-without-secure-flag', output['result'], goodness) # Login and session cookies should be set with HttpOnly if sessionid and not cookie.httponly: - output['result'] = only_if_worse('cookies-session-without-httponly-flag', - output['result'], - goodness) + 
output['result'] = only_if_worse('cookies-session-without-httponly-flag', output['result'], goodness) # Store whether or not we saw SameSite cookies, if cookies were set if output['result'] is None: @@ -477,9 +483,7 @@ def cookies(reqs: dict, expectation='cookies-secure-with-httponly-sessions') -> output['data'] = jar if len(str(jar)) < 32768 else {} # Check to see if the test passed or failed - if output['result'] in ('cookies-not-found', - 'cookies-secure-with-httponly-sessions-and-samesite', - expectation): + if output['result'] in ('cookies-not-found', 'cookies-secure-with-httponly-sessions-and-samesite', expectation): output['pass'] = True return output @@ -594,20 +598,15 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: output = { 'data': None, 'expectation': expectation, - 'http': False, # whether an HTTP header was available - 'meta': False, # whether an HTTP meta-equiv was available + 'http': False, # whether an HTTP header was available + 'meta': False, # whether an HTTP meta-equiv was available 'pass': False, 'result': None, } - goodness = ['no-referrer', - 'same-origin', - 'strict-origin', - 'strict-origin-when-cross-origin'] + goodness = ['no-referrer', 'same-origin', 'strict-origin', 'strict-origin-when-cross-origin'] - badness = ['origin', - 'origin-when-cross-origin', - 'unsafe-url'] + badness = ['origin', 'origin-when-cross-origin', 'unsafe-url'] valid = goodness + badness + ['no-referrer-when-downgrade'] @@ -619,8 +618,9 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: # If it's in both a header and http-equiv, http-equiv gets precedence (aka comes last) if 'Referrer-Policy' in response.headers and 'Referrer-Policy' in response.http_equiv: - output['data'] = ', '.join([response.headers['Referrer-Policy'], - response.http_equiv['Referrer-Policy']])[0:256] # Code defensively + output['data'] = ', '.join([response.headers['Referrer-Policy'], response.http_equiv['Referrer-Policy']])[ + 0:256 + ] # Code defensively elif 'Referrer-Policy' in response.headers or 'Referrer-Policy' in response.http_equiv: output['data'] = (response.http_equiv.get('Referrer-Policy') or response.headers.get('Referrer-Policy'))[0:256] else: @@ -642,10 +642,12 @@ def referrer_policy(reqs: dict, expectation='referrer-policy-private') -> dict: output['result'] = 'referrer-policy-header-invalid' # Test passed or failed - if output['result'] in ('referrer-policy-private', - 'referrer-policy-not-implemented', - 'referrer-policy-no-referrer-when-downgrade', - expectation): + if output['result'] in ( + 'referrer-policy-private', + 'referrer-policy-not-implemented', + 'referrer-policy-no-referrer-when-downgrade', + expectation, + ): output['pass'] = True return output @@ -732,9 +734,7 @@ def strict_transport_security(reqs: dict, expectation='hsts-implemented-max-age- output['preloaded'] = True # Check to see if the test passed or failed - if output['result'] in ('hsts-implemented-max-age-at-least-six-months', - 'hsts-preloaded', - expectation): + if output['result'] in ('hsts-implemented-max-age-at-least-six-months', 'hsts-preloaded', expectation): output['pass'] = True return output @@ -825,10 +825,12 @@ def x_frame_options(reqs: dict, expectation='x-frame-options-sameorigin-or-deny' output['result'] = 'x-frame-options-implemented-via-csp' # Check to see if the test passed or failed - if output['result'] in ('x-frame-options-allow-from-origin', - 'x-frame-options-sameorigin-or-deny', - 'x-frame-options-implemented-via-csp', - expectation): + 
if output['result'] in ( + 'x-frame-options-allow-from-origin', + 'x-frame-options-sameorigin-or-deny', + 'x-frame-options-implemented-via-csp', + expectation, + ): output['pass'] = True return output @@ -862,7 +864,7 @@ def x_xss_protection(reqs: dict, expectation='x-xss-protection-1-mode-block') -> } enabled = False # XXSSP enabled or not - valid = True # XXSSP header valid or not + valid = True # XXSSP header valid or not response = reqs['responses']['auto'] header = response.headers.get('X-XSS-Protection', '').strip() xxssp = {} diff --git a/httpobs/scanner/analyzer/misc.py b/httpobs/scanner/analyzer/misc.py index 20fd0feb..65d5914e 100644 --- a/httpobs/scanner/analyzer/misc.py +++ b/httpobs/scanner/analyzer/misc.py @@ -17,8 +17,9 @@ def __parse_acao_xml_get_domains(xml, type='crossdomain') -> list: # Parse the files if type == 'crossdomain': - return [domains.get('domain').strip() - for domains in soup.find_all('allow-access-from') if domains.get('domain')] + return [ + domains.get('domain').strip() for domains in soup.find_all('allow-access-from') if domains.get('domain') + ] elif type == 'clientaccesspolicy': return [domains.get('uri').strip() for domains in soup.find_all('domain') if domains.get('uri')] @@ -40,11 +41,7 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource result: short string describing the result of the test """ output = { - 'data': { - 'acao': None, - 'clientaccesspolicy': None, - 'crossdomain': None - }, + 'data': {'acao': None, 'clientaccesspolicy': None, 'crossdomain': None}, 'expectation': expectation, 'pass': False, 'result': 'cross-origin-resource-sharing-not-implemented', @@ -59,8 +56,10 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource if output['data']['acao'] == '*': output['result'] = 'cross-origin-resource-sharing-implemented-with-public-access' - elif (acao.request.headers.get('Origin') == acao.headers['Access-Control-Allow-Origin'] and - acao.headers.get('Access-Control-Allow-Credentials', '').lower().strip() == 'true'): + elif ( + acao.request.headers.get('Origin') == acao.headers['Access-Control-Allow-Origin'] + and acao.headers.get('Access-Control-Allow-Credentials', '').lower().strip() == 'true' + ): output['result'] = 'cross-origin-resource-sharing-implemented-with-universal-access' else: output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access' @@ -88,9 +87,11 @@ def cross_origin_resource_sharing(reqs: dict, expectation='cross-origin-resource output['result'] = 'cross-origin-resource-sharing-implemented-with-restricted-access' # Check to see if the test passed or failed - if output['result'] in ('cross-origin-resource-sharing-implemented-with-public-access', - 'cross-origin-resource-sharing-implemented-with-restricted-access', - expectation): + if output['result'] in ( + 'cross-origin-resource-sharing-implemented-with-public-access', + 'cross-origin-resource-sharing-implemented-with-restricted-access', + expectation, + ): output['pass'] = True return output @@ -165,8 +166,7 @@ def redirection(reqs: dict, expectation='redirection-to-https') -> dict: # If it's an http -> https redirection, make sure it redirects to the same host. 
If that's not done, then # HSTS cannot be properly set on the original host # TODO: Check for redirections like: http://www.example.com -> https://example.com -> https://www.example.com - elif (route[0].scheme == 'http' and route[1].scheme == 'https' and - route[0].hostname != route[1].hostname): + elif route[0].scheme == 'http' and route[1].scheme == 'https' and route[0].hostname != route[1].hostname: output['result'] = 'redirection-off-host-from-http' output['status_code'] = response.history[-1].status_code else: @@ -177,9 +177,7 @@ def redirection(reqs: dict, expectation='redirection-to-https') -> dict: output['status_code'] = output['status_code'] if len(str(output['status_code'])) < 5 else None # Check to see if the test passed or failed - if output['result'] in ('redirection-not-needed-no-http', - 'redirection-all-redirects-preloaded', - expectation): + if output['result'] in ('redirection-not-needed-no-http', 'redirection-all-redirects-preloaded', expectation): output['pass'] = True return output diff --git a/httpobs/scanner/grader/__init__.py b/httpobs/scanner/grader/__init__.py index c42f32de..e82f9034 100644 --- a/httpobs/scanner/grader/__init__.py +++ b/httpobs/scanner/grader/__init__.py @@ -1,12 +1,16 @@ -from .grade import (get_score_description, - get_score_modifier, - get_grade_and_likelihood_for_score, - GRADES, - MINIMUM_SCORE_FOR_EXTRA_CREDIT) +from .grade import ( + get_score_description, + get_score_modifier, + get_grade_and_likelihood_for_score, + GRADES, + MINIMUM_SCORE_FOR_EXTRA_CREDIT, +) -__all__ = ['get_score_description', - 'get_score_modifier', - 'get_grade_and_likelihood_for_score', - 'GRADES', - 'MINIMUM_SCORE_FOR_EXTRA_CREDIT'] +__all__ = [ + 'get_score_description', + 'get_score_modifier', + 'get_grade_and_likelihood_for_score', + 'GRADES', + 'MINIMUM_SCORE_FOR_EXTRA_CREDIT', +] diff --git a/httpobs/scanner/grader/grade.py b/httpobs/scanner/grader/grade.py index 0bb53162..e5095852 100644 --- a/httpobs/scanner/grader/grade.py +++ b/httpobs/scanner/grader/grade.py @@ -19,20 +19,14 @@ 15: 'F', 10: 'F', 5: 'F', - 0: 'F' + 0: 'F', } # See https://wiki.mozilla.org/Security/Standard_Levels for a definition of the risk levels # We cannot make an accurate decision on HIGH and MAXIMUM risk likelihood indicators with the current checks, # thus the likelihood indicator is currently at best (or worse) MEDIUM. Modifiers (A-A+B+B-, ... are normalized # A,B, ...) in the calling function. -LIKELIHOOD_INDICATOR_CHART = { - 'A': 'LOW', - 'B': 'MEDIUM', - 'C': 'MEDIUM', - 'D': 'MEDIUM', - 'F': 'MEDIUM' -} +LIKELIHOOD_INDICATOR_CHART = {'A': 'LOW', 'B': 'MEDIUM', 'C': 'MEDIUM', 'D': 'MEDIUM', 'F': 'MEDIUM'} # The minimum required score to receive extra credit MINIMUM_SCORE_FOR_EXTRA_CREDIT = 90 @@ -61,7 +55,6 @@ 'description': 'Contribute.json file cannot be parsed', 'modifier': -10, }, - # CSP 'csp-implemented-with-no-unsafe-default-src-none': { 'description': 'Content Security Policy (CSP) implemented with default-src \'none\' and no \'unsafe\'', @@ -72,13 +65,17 @@ 'modifier': 5, }, 'csp-implemented-with-unsafe-inline-in-style-src-only': { - 'description': ('Content Security Policy (CSP) implemented with unsafe sources inside style-src. ' - 'This includes \'unsafe-inline\', data: or overly broad sources such as https:.'), + 'description': ( + 'Content Security Policy (CSP) implemented with unsafe sources inside style-src. ' + 'This includes \'unsafe-inline\', data: or overly broad sources such as https:.' 
+ ), 'modifier': 0, }, 'csp-implemented-with-insecure-scheme-in-passive-content-only': { - 'description': ('Content Security Policy (CSP) implemented, ' - 'but secure site allows images or media to be loaded over HTTP'), + 'description': ( + 'Content Security Policy (CSP) implemented, ' + 'but secure site allows images or media to be loaded over HTTP' + ), 'modifier': -10, }, 'csp-implemented-with-unsafe-eval': { @@ -86,15 +83,18 @@ 'modifier': -10, }, 'csp-implemented-with-unsafe-inline': { - 'description': ('Content Security Policy (CSP) implemented unsafely. ' - 'This includes \'unsafe-inline\' or data: inside script-src, ' - 'overly broad sources such as https: inside object-src or script-src, ' - 'or not restricting the sources for object-src or script-src.'), + 'description': ( + 'Content Security Policy (CSP) implemented unsafely. ' + 'This includes \'unsafe-inline\' or data: inside script-src, ' + 'overly broad sources such as https: inside object-src or script-src, ' + 'or not restricting the sources for object-src or script-src.' + ), 'modifier': -20, }, 'csp-implemented-with-insecure-scheme': { - 'description': ('Content Security Policy (CSP) implemented, ' - 'but secure site allows resources to be loaded over HTTP'), + 'description': ( + 'Content Security Policy (CSP) implemented, ' 'but secure site allows resources to be loaded over HTTP' + ), 'modifier': -20, }, 'csp-header-invalid': { @@ -105,13 +105,13 @@ 'description': 'Content Security Policy (CSP) header not implemented', 'modifier': -25, }, - # Cookies 'cookies-secure-with-httponly-sessions-and-samesite': { - 'description': ('All cookies use the Secure flag, session cookies use the HttpOnly flag, and cross-origin ' - 'restrictions are in place via the SameSite flag'), + 'description': ( + 'All cookies use the Secure flag, session cookies use the HttpOnly flag, and cross-origin ' + 'restrictions are in place via the SameSite flag' + ), 'modifier': 5, - }, 'cookies-secure-with-httponly-sessions': { 'description': 'All cookies use the Secure flag and all session cookies use the HttpOnly flag', @@ -149,27 +149,28 @@ 'description': 'Session cookie set without using the Secure flag or set over HTTP', 'modifier': -40, }, - # Cross-origin resource sharing 'cross-origin-resource-sharing-not-implemented': { 'description': 'Content is not visible via cross-origin resource sharing (CORS) files or headers', 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-public-access': { - 'description': ('Public content is visible via cross-origin resource sharing (CORS) ' - 'Access-Control-Allow-Origin header'), + 'description': ( + 'Public content is visible via cross-origin resource sharing (CORS) ' 'Access-Control-Allow-Origin header' + ), 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-restricted-access': { - 'description': ('Content is visible via cross-origin resource sharing (CORS) files or headers, ' - 'but is restricted to specific domains'), + 'description': ( + 'Content is visible via cross-origin resource sharing (CORS) files or headers, ' + 'but is restricted to specific domains' + ), 'modifier': 0, }, 'cross-origin-resource-sharing-implemented-with-universal-access': { 'description': 'Content is visible via cross-origin resource sharing (CORS) file or headers', 'modifier': -50, }, - # Public Key Pinning 'hpkp-preloaded': { 'description': 'Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', @@ -192,15 +193,15 @@ 'modifier': 0, }, 'hpkp-invalid-cert': { - 'description': ('HTTP 
Public Key Pinning (HPKP) header cannot be set, ' - 'as site contains an invalid certificate chain'), + 'description': ( + 'HTTP Public Key Pinning (HPKP) header cannot be set, ' 'as site contains an invalid certificate chain' + ), 'modifier': 0, }, 'hpkp-header-invalid': { 'description': 'HTTP Public Key Pinning (HPKP) header cannot be recognized', 'modifier': -5, }, - # Redirection 'redirection-all-redirects-preloaded': { 'description': 'All hosts redirected to are in the HTTP Strict Transport Security (HSTS) preload list', @@ -234,11 +235,12 @@ 'description': 'Invalid certificate chain encountered during redirection', 'modifier': -20, }, - # Referrer Policy 'referrer-policy-private': { - 'description': ('Referrer-Policy header set to "no-referrer", "same-origin", "strict-origin" or ' - '"strict-origin-when-cross-origin"'), + 'description': ( + 'Referrer-Policy header set to "no-referrer", "same-origin", "strict-origin" or ' + '"strict-origin-when-cross-origin"' + ), 'modifier': 5, }, 'referrer-policy-no-referrer-when-downgrade': { @@ -257,7 +259,6 @@ 'description': 'Referrer-Policy header cannot be recognized', 'modifier': -5, }, - # Strict Transport Security (HSTS) 'hsts-preloaded': { 'description': 'Preloaded via the HTTP Strict Transport Security (HSTS) preloading process', @@ -284,11 +285,12 @@ 'modifier': -20, }, 'hsts-invalid-cert': { - 'description': ('HTTP Strict Transport Security (HSTS) header cannot be set, ' - 'as site contains an invalid certificate chain'), + 'description': ( + 'HTTP Strict Transport Security (HSTS) header cannot be set, ' + 'as site contains an invalid certificate chain' + ), 'modifier': -20, }, - # Subresource Integrity (SRI) 'sri-implemented-and-all-scripts-loaded-securely': { 'description': 'Subresource Integrity (SRI) is implemented and all scripts are loaded from a similar origin', @@ -315,16 +317,19 @@ 'modifier': -5, }, 'sri-implemented-but-external-scripts-not-loaded-securely': { - 'description': ('Subresource Integrity (SRI) implemented, but external scripts are loaded over HTTP or use ' - 'protocol-relative URLs via src="//..."'), + 'description': ( + 'Subresource Integrity (SRI) implemented, but external scripts are loaded over HTTP or use ' + 'protocol-relative URLs via src="//..."' + ), 'modifier': -20, }, 'sri-not-implemented-and-external-scripts-not-loaded-securely': { - 'description': ('Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or ' - 'use protocol-relative URLs via src="//..."'), + 'description': ( + 'Subresource Integrity (SRI) not implemented, and external scripts are loaded over HTTP or ' + 'use protocol-relative URLs via src="//..."' + ), 'modifier': -50, }, - # X-Content-Type-Options 'x-content-type-options-nosniff': { 'description': 'X-Content-Type-Options header set to "nosniff"', @@ -338,7 +343,6 @@ 'description': 'X-Content-Type-Options header cannot be recognized', 'modifier': -5, }, - # X-Frame-Options 'x-frame-options-implemented-via-csp': { 'description': 'X-Frame-Options (XFO) implemented via the CSP frame-ancestors directive', @@ -360,7 +364,6 @@ 'description': 'X-Frame-Options (XFO) header cannot be recognized', 'modifier': -20, }, - # X-XSS-Protection 'x-xss-protection-enabled-mode-block': { 'description': 'X-XSS-Protection header set to "1; mode=block"', @@ -386,7 +389,6 @@ 'description': 'X-XSS-Protection header cannot be recognized', 'modifier': -10, }, - # Generic results 'html-not-parsable': { 'description': 'Claims to be html, but cannot be parsed', @@ -399,7 +401,7 @@ 
'xml-not-parsable': { 'description': 'Claims to be xml, but cannot be parsed', 'modifier': -20, # can't run an ACAO check if the xml files can't be parsed - } + }, } diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index 3cb86e6b..d0de410b 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -73,5 +73,5 @@ def scan(hostname, **kwargs): 'tests_passed': tests_passed, 'tests_quantity': NUM_TESTS, }, - 'tests': {result.pop('name'): result for result in results} + 'tests': {result.pop('name'): result for result in results}, } diff --git a/httpobs/scanner/main.py b/httpobs/scanner/main.py index 81fea97a..5152e7f1 100644 --- a/httpobs/scanner/main.py +++ b/httpobs/scanner/main.py @@ -2,19 +2,19 @@ from time import sleep from urllib.parse import parse_qs, urlparse -from httpobs.conf import (BROKER_URL, - SCANNER_ALLOW_KICKSTART, - SCANNER_ALLOW_KICKSTART_NUM_ABORTED, - SCANNER_BROKER_RECONNECTION_SLEEP_TIME, - SCANNER_CYCLE_SLEEP_TIME, - SCANNER_DATABASE_RECONNECTION_SLEEP_TIME, - SCANNER_MAINTENANCE_CYCLE_FREQUENCY, - SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY, - SCANNER_MAX_CPU_UTILIZATION, - SCANNER_MAX_LOAD) -from httpobs.database import (periodic_maintenance, - refresh_materialized_views, - update_scans_dequeue_scans) +from httpobs.conf import ( + BROKER_URL, + SCANNER_ALLOW_KICKSTART, + SCANNER_ALLOW_KICKSTART_NUM_ABORTED, + SCANNER_BROKER_RECONNECTION_SLEEP_TIME, + SCANNER_CYCLE_SLEEP_TIME, + SCANNER_DATABASE_RECONNECTION_SLEEP_TIME, + SCANNER_MAINTENANCE_CYCLE_FREQUENCY, + SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY, + SCANNER_MAX_CPU_UTILIZATION, + SCANNER_MAX_LOAD, +) +from httpobs.database import periodic_maintenance, refresh_materialized_views, update_scans_dequeue_scans from httpobs.scanner.tasks import scan import datetime @@ -50,10 +50,12 @@ def main(): if headroom <= 0: # If the cycle sleep time is .5, sleep 2 seconds at a minimum, 10 seconds at a maximum sleep_time = min(max(abs(headroom), SCANNER_CYCLE_SLEEP_TIME * 4), 10) - print('[{time}] WARNING: Load too high. Sleeping for {num} second(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=sleep_time), - file=sys.stderr) + print( + '[{time}] WARNING: Load too high. Sleeping for {num} second(s).'.format( + time=str(datetime.datetime.now()).split('.')[0], num=sleep_time + ), + file=sys.stderr, + ) sleep(sleep_time) continue @@ -69,35 +71,49 @@ def main(): # If it fails, we don't care. Of course, nobody reads the comments, so I should say that *I* don't care. try: if dequeue_loop_count % SCANNER_MAINTENANCE_CYCLE_FREQUENCY == 0: - print('[{time}] INFO: Performing periodic maintenance.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) + print( + '[{time}] INFO: Performing periodic maintenance.'.format( + time=str(datetime.datetime.now()).split('.')[0] + ), + file=sys.stderr, + ) dequeue_loop_count = 0 num = periodic_maintenance() if num > 0: - print('[{time}] INFO: Cleared {num} broken scan(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=num), - file=sys.stderr) + print( + '[{time}] INFO: Cleared {num} broken scan(s).'.format( + time=str(datetime.datetime.now()).split('.')[0], num=num + ), + file=sys.stderr, + ) # Forcibly restart if things are going real bad, sleep for a bit to avoid flagging if num > SCANNER_ALLOW_KICKSTART_NUM_ABORTED and SCANNER_ALLOW_KICKSTART: sleep(10) try: - print('[{time}] ERROR: Celery appears to be hung. 
Attempting to kickstart the scanners.'.format(
-                        time=str(datetime.datetime.now()).split('.')[0]),
-                        file=sys.stderr)
+                    print(
+                        '[{time}] ERROR: Celery appears to be hung. Attempting to kickstart the scanners.'.format(
+                            time=str(datetime.datetime.now()).split('.')[0]
+                        ),
+                        file=sys.stderr,
+                    )
                     subprocess.call(['pkill', '-u', 'httpobs'])
                 except FileNotFoundError:
-                    print('[{time}] ERROR: Tried to kickstart, but no pkill found.'.format(
-                        time=str(datetime.datetime.now()).split('.')[0]),
-                        file=sys.stderr)
+                    print(
+                        '[{time}] ERROR: Tried to kickstart, but no pkill found.'.format(
+                            time=str(datetime.datetime.now()).split('.')[0]
+                        ),
+                        file=sys.stderr,
+                    )
                 except:
-                    print('[{time}] ERROR: Tried to kickstart, but failed for unknown reasons.'.format(
-                        time=str(datetime.datetime.now()).split('.')[0]),
-                        file=sys.stderr)
+                    print(
+                        '[{time}] ERROR: Tried to kickstart, but failed for unknown reasons.'.format(
+                            time=str(datetime.datetime.now()).split('.')[0]
+                        ),
+                        file=sys.stderr,
+                    )
         except:
             pass
         finally:
@@ -107,16 +123,22 @@ def main():
         # Every so often we need to refresh the materialized views that the statistics depend on
         try:
             if materialized_view_loop_count % SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY == 0:
-                print('[{time}] INFO: Refreshing materialized views.'.format(
-                    time=str(datetime.datetime.now()).split('.')[0]),
-                    file=sys.stderr)
+                print(
+                    '[{time}] INFO: Refreshing materialized views.'.format(
+                        time=str(datetime.datetime.now()).split('.')[0]
+                    ),
+                    file=sys.stderr,
+                )
                 materialized_view_loop_count = 0
 
                 refresh_materialized_views()
-                print('[{time}] INFO: Materialized views refreshed.'.format(
-                    time=str(datetime.datetime.now()).split('.')[0]),
-                    file=sys.stderr)
+                print(
+                    '[{time}] INFO: Materialized views refreshed.'.format(
+                        time=str(datetime.datetime.now()).split('.')[0]
+                    ),
+                    file=sys.stderr,
+                )
         except:
             pass
         finally:
@@ -125,25 +147,27 @@ def main():
         # Verify that the broker is still up; if it's down, let's sleep and try again later
         try:
             if broker_url.scheme.lower() == 'redis':
-                conn = redis.Connection(host=broker_url.hostname,
-                                        port=broker_url.port or 6379,
-                                        db=int(broker_url.path[1:] if len(broker_url.path) > 0 else 0),
-                                        password=broker_url.password)
+                conn = redis.Connection(
+                    host=broker_url.hostname,
+                    port=broker_url.port or 6379,
+                    db=int(broker_url.path[1:] if len(broker_url.path) > 0 else 0),
+                    password=broker_url.password,
+                )
             else:
-                conn = redis.UnixDomainSocketConnection(path=broker_url.path,
-                                                        db=int(parse_qs(broker_url.query).get(
-                                                            'virtual_host', ['0'])
-                                                            [0]))
+                conn = redis.UnixDomainSocketConnection(
+                    path=broker_url.path, db=int(parse_qs(broker_url.query).get('virtual_host', ['0'])[0])
+                )
 
             conn.connect()
             conn.can_read()
             conn.disconnect()
 
             del conn
         except:
-            print('[{time}] ERROR: Unable to connect to to redis. Sleeping for {num} seconds.'.format(
-                time=str(datetime.datetime.now()).split('.')[0],
-                num=SCANNER_BROKER_RECONNECTION_SLEEP_TIME),
-                file=sys.stderr
+            print(
+                '[{time}] ERROR: Unable to connect to redis. Sleeping for {num} seconds.'.format(
+                    time=str(datetime.datetime.now()).split('.')[0], num=SCANNER_BROKER_RECONNECTION_SLEEP_TIME
+                ),
+                file=sys.stderr,
+            )
             sleep(SCANNER_BROKER_RECONNECTION_SLEEP_TIME)
            continue
@@ -152,21 +176,24 @@ def main():
         try:
             sites_to_scan = update_scans_dequeue_scans(dequeue_quantity)
         except IOError:
-            print('[{time}] ERROR: Unable to retrieve lists of sites to scan. 
Sleeping for {num} seconds.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=SCANNER_DATABASE_RECONNECTION_SLEEP_TIME), - file=sys.stderr + print( + '[{time}] ERROR: Unable to retrieve lists of sites to scan. Sleeping for {num} seconds.'.format( + time=str(datetime.datetime.now()).split('.')[0], num=SCANNER_DATABASE_RECONNECTION_SLEEP_TIME + ), + file=sys.stderr, ) sleep(SCANNER_DATABASE_RECONNECTION_SLEEP_TIME) continue try: if sites_to_scan: - print('[{time}] INFO: Dequeuing {num} site(s): {sites}.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=len(sites_to_scan), - sites=', '.join([site[0] for site in sites_to_scan])), - file=sys.stderr + print( + '[{time}] INFO: Dequeuing {num} site(s): {sites}.'.format( + time=str(datetime.datetime.now()).split('.')[0], + num=len(sites_to_scan), + sites=', '.join([site[0] for site in sites_to_scan]), + ), + file=sys.stderr, ) for site in sites_to_scan: @@ -180,9 +207,10 @@ def main(): print('Exiting scanner backend') sys.exit(1) except: # this shouldn't trigger, but we don't want a scan breakage to kill the scanner - print('[{time}] ERROR: Unknown celery error.'.format( - time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr) + print( + '[{time}] ERROR: Unknown celery error.'.format(time=str(datetime.datetime.now()).split('.')[0]), + file=sys.stderr, + ) if __name__ == '__main__': diff --git a/httpobs/scanner/retriever/retriever.py b/httpobs/scanner/retriever/retriever.py index 8bbec210..2a9c391c 100644 --- a/httpobs/scanner/retriever/retriever.py +++ b/httpobs/scanner/retriever/retriever.py @@ -1,10 +1,7 @@ from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded from urllib.parse import urlparse -from httpobs.conf import (RETRIEVER_CONNECT_TIMEOUT, - RETRIEVER_CORS_ORIGIN, - RETRIEVER_READ_TIMEOUT, - RETRIEVER_USER_AGENT) +from httpobs.conf import RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_CORS_ORIGIN, RETRIEVER_READ_TIMEOUT, RETRIEVER_USER_AGENT from httpobs.scanner.utils import parse_http_equiv_headers import logging @@ -15,6 +12,7 @@ # verification is disabled. Also disable requests errors at levels lower than CRITICAL, see: # https://github.com/celery/celery/issues/3633 for crashy details from requests.packages.urllib3.exceptions import InsecureRequestWarning + requests.packages.urllib3.disable_warnings(InsecureRequestWarning) logging.getLogger('requests').setLevel(logging.CRITICAL) @@ -52,9 +50,11 @@ def __create_session(url: str, **kwargs) -> dict: # Override the User-Agent; some sites (like twitter) don't send the CSP header unless you have a modern # user agent - s.headers.update({ - 'User-Agent': RETRIEVER_USER_AGENT, - }) + s.headers.update( + { + 'User-Agent': RETRIEVER_USER_AGENT, + } + ) try: r = s.get(url, timeout=TIMEOUT) @@ -98,10 +98,12 @@ def __get(session, relative_path='/', headers=None, cookies=None): # TODO: limit the maximum size of the response, to keep malicious site operators from killing us # TODO: Perhaps we can naively do it for now by simply setting a timeout? # TODO: catch TLS errors instead of just setting it to None? 
- return session.get(session.url.scheme + '://' + session.url.netloc + relative_path, - headers=headers, - cookies=cookies, - timeout=TIMEOUT) + return session.get( + session.url.scheme + '://' + session.url.netloc + relative_path, + headers=headers, + cookies=cookies, + timeout=TIMEOUT, + ) # Let celery exceptions percolate upward except (SoftTimeLimitExceeded, TimeLimitExceeded): raise @@ -126,8 +128,8 @@ def __get_page_text(response: requests.Response, force: bool = False) -> str: def retrieve_all(hostname, **kwargs): - kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database - kwargs['headers'] = kwargs.get('headers', {}) # HTTP headers to send, instead of from the database + kwargs['cookies'] = kwargs.get('cookies', {}) # HTTP cookies to send, instead of from the database + kwargs['headers'] = kwargs.get('headers', {}) # HTTP headers to send, instead of from the database # This way of doing it keeps the urls tidy even if makes the code ugly kwargs['http_port'] = ':' + str(kwargs.get('http_port', '')) if 'http_port' in kwargs else '' @@ -137,8 +139,7 @@ def retrieve_all(hostname, **kwargs): retrievals = { 'hostname': hostname, - 'resources': { - }, + 'resources': {}, 'responses': { 'auto': None, # whichever of 'http' or 'https' actually works, with 'https' as higher priority 'cors': None, # CORS preflight test @@ -149,12 +150,7 @@ def retrieve_all(hostname, **kwargs): } # The list of resources to get - resources = ( - '/clientaccesspolicy.xml', - '/contribute.json', - '/crossdomain.xml', - '/robots.txt' - ) + resources = ('/clientaccesspolicy.xml', '/contribute.json', '/crossdomain.xml', '/robots.txt') # Create some reusable sessions, one for HTTP and one for HTTPS http_session = __create_session('http://' + hostname + kwargs['http_port'] + kwargs['path'], **kwargs) @@ -180,9 +176,9 @@ def retrieve_all(hostname, **kwargs): retrievals['resources']['__path__'] = __get_page_text(retrievals['responses']['auto'], force=True) # Do a CORS preflight request - retrievals['responses']['cors'] = __get(retrievals['session'], - kwargs['path'], - headers={'Origin': RETRIEVER_CORS_ORIGIN}) + retrievals['responses']['cors'] = __get( + retrievals['session'], kwargs['path'], headers={'Origin': RETRIEVER_CORS_ORIGIN} + ) # Store all the files we retrieve for resource in resources: @@ -190,8 +186,10 @@ def retrieve_all(hostname, **kwargs): retrievals['resources'][resource] = __get_page_text(resp) # Parse out the HTTP meta-equiv headers - if (retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES and - retrievals['resources']['__path__']): + if ( + retrievals['responses']['auto'].headers.get('Content-Type', '').split(';')[0] in HTML_TYPES + and retrievals['resources']['__path__'] + ): retrievals['responses']['auto'].http_equiv = parse_http_equiv_headers(retrievals['resources']['__path__']) else: retrievals['responses']['auto'].http_equiv = {} diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index 03c84c04..3151aff1 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -1,15 +1,8 @@ from celery import Celery -from celery.exceptions import ( - SoftTimeLimitExceeded, - TimeLimitExceeded, - WorkerLostError, - WorkerShutdown, - WorkerTerminate) +from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError, WorkerShutdown, WorkerTerminate from httpobs.conf import DEVELOPMENT_MODE -from httpobs.database import (insert_test_results, - select_site_headers, - update_scan_state) 
+from httpobs.database import insert_test_results, select_site_headers, update_scan_state from httpobs.scanner import celeryconfig, STATE_ABORTED, STATE_FAILED, STATE_RUNNING from httpobs.scanner.analyzer import tests from httpobs.scanner.retriever import retrieve_all @@ -43,11 +36,13 @@ def scan(hostname: str, site_id: int, scan_id: int): # Execute each test, replacing the underscores in the function name with dashes in the test name # TODO: Get overridden expectations - insert_test_results(site_id, - scan_id, - [test(reqs) for test in tests], - sanitize_headers(reqs['responses']['auto'].headers), - reqs['responses']['auto'].status_code) + insert_test_results( + site_id, + scan_id, + [test(reqs) for test in tests], + sanitize_headers(reqs['responses']['auto'].headers), + reqs['responses']['auto'].status_code, + ) # catch the celery timeout, which will almost certainly occur in retrieve_all() except SoftTimeLimitExceeded: @@ -67,5 +62,6 @@ def scan(hostname: str, site_id: int, scan_id: int): # Print the exception to stderr if we're in dev if DEVELOPMENT_MODE: import traceback + print('Error detected in scan for : ' + hostname) traceback.print_exc(file=sys.stderr) diff --git a/httpobs/scanner/utils.py b/httpobs/scanner/utils.py index c3eb4fb2..072ffe48 100644 --- a/httpobs/scanner/utils.py +++ b/httpobs/scanner/utils.py @@ -5,8 +5,7 @@ import sys from bs4 import BeautifulSoup as bs -from httpobs.conf import (SCANNER_ALLOW_LOCALHOST, - SCANNER_PINNED_DOMAINS) +from httpobs.conf import SCANNER_ALLOW_LOCALHOST, SCANNER_PINNED_DOMAINS from requests.structures import CaseInsensitiveDict @@ -14,9 +13,11 @@ def parse_http_equiv_headers(html: str) -> CaseInsensitiveDict: - http_equiv_headers = CaseInsensitiveDict({ - 'Content-Security-Policy': [], - }) + http_equiv_headers = CaseInsensitiveDict( + { + 'Content-Security-Policy': [], + } + ) # Try to parse the HTML try: @@ -54,13 +55,16 @@ def retrieve_store_hsts_preload_list(): r = json.loads(r) # Mapping of site -> whether it includes subdomains - hsts = {site['name']: { - 'includeSubDomains': site.get('include_subdomains', False), - 'includeSubDomainsForPinning': - site.get('include_subdomains', False) or site.get('include_subdomains_for_pinning', False), - 'mode': site.get('mode'), - 'pinned': True if 'pins' in site else False, - } for site in r['entries']} + hsts = { + site['name']: { + 'includeSubDomains': site.get('include_subdomains', False), + 'includeSubDomainsForPinning': site.get('include_subdomains', False) + or site.get('include_subdomains_for_pinning', False), + 'mode': site.get('mode'), + 'pinned': True if 'pins' in site else False, + } + for site in r['entries'] + } # Add in the manually pinned domains for pinned_domain in SCANNER_PINNED_DOMAINS: @@ -68,7 +72,7 @@ def retrieve_store_hsts_preload_list(): 'includeSubDomains': True, 'includeSubDomainsForPinning': True, 'mode': 'force-https', - 'pinned': True + 'pinned': True, } # Write json file to disk @@ -110,7 +114,7 @@ def valid_hostname(hostname: str): # First, let's try to see if it's an IPv4 address try: socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address - return None # If we get this far, it's an IP address and therefore not a valid fqdn + return None # If we get this far, it's an IP address and therefore not a valid fqdn except: pass diff --git a/httpobs/scripts/httpobs-local-scan b/httpobs/scripts/httpobs-local-scan index e44ed1bb..0c40cd00 100755 --- a/httpobs/scripts/httpobs-local-scan +++ 
b/httpobs/scripts/httpobs-local-scan @@ -13,36 +13,20 @@ if __name__ == "__main__": parser = argparse.ArgumentParser() # Add the various arguments - parser.add_argument('--http-port', - default=80, - help='port to use for the HTTP scan (instead of 80)', - type=int) - parser.add_argument('--https-port', - default=443, - help='port to use for the HTTPS scan (instead of 443)', - type=int) - parser.add_argument('--path', - default=argparse.SUPPRESS, - help='path to scan, instead of /', - type=str) - parser.add_argument('--no-verify', - action='store_true', - help='disable certificate verification in the HSTS/HPKP tests') - parser.add_argument('--cookies', - default=argparse.SUPPRESS, - help='cookies to send in scan (json formatted)', - type=json.loads) - parser.add_argument('--headers', - default=argparse.SUPPRESS, - help='headers to send in scan (json formatted)', - type=json.loads) - parser.add_argument('--format', - default='json', - help='output format (json or report), default of json', - type=str) - parser.add_argument('hostname', - help='host to scan (hostname only, no protocol or port)', - type=str) + parser.add_argument('--http-port', default=80, help='port to use for the HTTP scan (instead of 80)', type=int) + parser.add_argument('--https-port', default=443, help='port to use for the HTTPS scan (instead of 443)', type=int) + parser.add_argument('--path', default=argparse.SUPPRESS, help='path to scan, instead of /', type=str) + parser.add_argument( + '--no-verify', action='store_true', help='disable certificate verification in the HSTS/HPKP tests' + ) + parser.add_argument( + '--cookies', default=argparse.SUPPRESS, help='cookies to send in scan (json formatted)', type=json.loads + ) + parser.add_argument( + '--headers', default=argparse.SUPPRESS, help='headers to send in scan (json formatted)', type=json.loads + ) + parser.add_argument('--format', default='json', help='output format (json or report), default of json', type=str) + parser.add_argument('hostname', help='host to scan (hostname only, no protocol or port)', type=str) args = vars(parser.parse_args()) @@ -65,10 +49,10 @@ if __name__ == "__main__": # Because it makes sense this way if args['http_port'] == 80: - del (args['http_port']) + del args['http_port'] if args['https_port'] == 443: - del (args['https_port']) + del args['https_port'] if args.pop('no_verify'): args['verify'] = False @@ -80,19 +64,17 @@ if __name__ == "__main__": if output_format == 'json': print(json.dumps(r, indent=4, sort_keys=True)) elif output_format == 'report': - print('Score: {0} [{1}]'.format(r['scan']['score'], - r['scan']['grade'])) + print('Score: {0} [{1}]'.format(r['scan']['score'], r['scan']['grade'])) print('Modifiers:') # Get all the scores by test name - scores = [[k.replace('-', ' ').title(), v['score_modifier'], v['score_description']] - for k, v in r['tests'].items()] + scores = [ + [k.replace('-', ' ').title(), v['score_modifier'], v['score_description']] for k, v in r['tests'].items() + ] scores = sorted(scores, key=itemgetter(0)) # [('test1', -5, 'foo'), ('test2', -10, 'bar')] for score in scores: if score[1] > 0: score[1] = '+' + str(score[1]) # display 5 as +5 - print(' {test:<30} [{modifier:>3}] {reason}'.format(test=score[0], - modifier=score[1], - reason=score[2])) + print(' {test:<30} [{modifier:>3}] {reason}'.format(test=score[0], modifier=score[1], reason=score[2])) diff --git a/httpobs/scripts/httpobs-mass-scan b/httpobs/scripts/httpobs-mass-scan index 4774b231..cdabc718 100755 --- a/httpobs/scripts/httpobs-mass-scan +++ 
b/httpobs/scripts/httpobs-mass-scan @@ -44,8 +44,11 @@ if __name__ == '__main__': available = MAX_QUEUE - r.get('PENDING', 0) - r.get('RUNNING', 0) - r.get('STARTING', 0) - print('Queue availability: {queue_avail}. Total scanned: {total_scanned}. Pending: {pending}.'.format( - queue_avail=available, total_scanned=total_scanned, pending=r.get('PENDING', 0))) + print( + 'Queue availability: {queue_avail}. Total scanned: {total_scanned}. Pending: {pending}.'.format( + queue_avail=available, total_scanned=total_scanned, pending=r.get('PENDING', 0) + ) + ) # Quit if the scanner reports that nothing is pending if not hosts and r.get('PENDING', 0) == 0: diff --git a/httpobs/tests/unittests/test_content.py b/httpobs/tests/unittests/test_content.py index b0ef5ef6..87377cda 100644 --- a/httpobs/tests/unittests/test_content.py +++ b/httpobs/tests/unittests/test_content.py @@ -41,7 +41,9 @@ def test_contribute_too_large(self): self.assertEquals(result['data'], {}) def test_with_required_keys(self): - self.reqs['resources']['/contribute.json'] = """ + self.reqs['resources'][ + '/contribute.json' + ] = """ { "name": "Bedrock", "description": "The app powering www.mozilla.org.", @@ -86,7 +88,9 @@ def test_with_required_keys(self): self.assertTrue(result['pass']) def test_missing_required_keys(self): - self.reqs['resources']['/contribute.json'] = """ + self.reqs['resources'][ + '/contribute.json' + ] = """ { "name": "Bedrock", "description": "The app powering www.mozilla.org.", @@ -152,7 +156,9 @@ def test_not_html(self): # json, like what an API might return self.reqs['responses']['auto'].headers['Content-Type'] = 'application/json' - self.reqs['resources']['__path__'] = """ + self.reqs['resources'][ + '__path__' + ] = """ { 'foo': 'bar' } diff --git a/httpobs/tests/unittests/test_csp_parser.py b/httpobs/tests/unittests/test_csp_parser.py index 0ca40cf2..6201af95 100644 --- a/httpobs/tests/unittests/test_csp_parser.py +++ b/httpobs/tests/unittests/test_csp_parser.py @@ -8,73 +8,76 @@ def test_csp_parser(self): # one policy with one directive policy = ["default-src 'none'"] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"} - }) + self.assertEquals(parse_csp(policy), {'default-src': {"'none'"}}) # one policy with multiple directives policy = ["default-src 'none'; script-src 'self' https://mozilla.org"] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # two identical policies policy = [ "default-src 'none'; script-src 'self' https://mozilla.org", "default-src 'none'; script-src 'self' https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # two policies, one of which has a source that isn't in the other policy = [ "default-src 'none'; script-src 'self' https://mozilla.org", "default-src 'none'; script-src 'self' https://mozilla.org https://example.com", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # same thing as the previous policy, but the sources are in different orders policy 
= [ "default-src 'none'; script-src 'self' https://mozilla.org", "default-src 'none'; script-src https://example.com 'self' https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - 'script-src': {"'self'", 'https://mozilla.org'} - }) + self.assertEquals( + parse_csp(policy), {'default-src': {"'none'"}, 'script-src': {"'self'", 'https://mozilla.org'}} + ) # a policy with two differing websites that should end up with 'none' policy = [ "default-src https://mozilla.org", "default-src https://mozilla.com", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with four differing websites that should end up with 'none' policy = [ "default-src https://mozilla.org https://mozilla.net", "default-src https://mozilla.com https://mozilla.io", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with a bunch of websites, with only two in common policy = [ "default-src https://mozilla.org https://mozilla.net https://mozilla.com https://mozilla.io", "default-src https://mozilla.pizza https://mozilla.ninja https://mozilla.net https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"https://mozilla.net", "https://mozilla.org"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"https://mozilla.net", "https://mozilla.org"}, + }, + ) # a four policies with a bunch of websites, with only two in common policy = [ @@ -83,27 +86,36 @@ def test_csp_parser(self): "default-src https://mozilla.net https://mozilla.fox https://mozilla.fire https://mozilla.org", "default-src https://mozilla.browser https://mozilla.web https://mozilla.net https://mozilla.org", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"https://mozilla.net", "https://mozilla.org"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"https://mozilla.net", "https://mozilla.org"}, + }, + ) # a policy with http: and https:, two differing sources that should end up with 'none' policy = [ "default-src http:", "default-src https:", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # a policy with http: and https:, two differing sources that should end up with 'none' policy = [ "default-src http: http:", "default-src https: https:", ] - self.assertEquals(parse_csp(policy), { - 'default-src': {"'none'"}, - }) + self.assertEquals( + parse_csp(policy), + { + 'default-src': {"'none'"}, + }, + ) # policies that are too short policies = ( diff --git a/httpobs/tests/unittests/test_grades.py b/httpobs/tests/unittests/test_grades.py index 4d7bc555..74eb4ee7 100644 --- a/httpobs/tests/unittests/test_grades.py +++ b/httpobs/tests/unittests/test_grades.py @@ -5,8 +5,10 @@ class TestGrader(TestCase): def test_get_score_description(self): - self.assertEquals('Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', - get_score_description('hpkp-preloaded')) + self.assertEquals( + 'Preloaded via the HTTP Public Key Pinning (HPKP) preloading process', + get_score_description('hpkp-preloaded'), + ) def test_get_score_modifier(self): self.assertEquals(0, get_score_modifier('hpkp-preloaded')) diff --git a/httpobs/tests/unittests/test_headers.py b/httpobs/tests/unittests/test_headers.py index 
6e204727..cb2720ef 100644 --- a/httpobs/tests/unittests/test_headers.py +++ b/httpobs/tests/unittests/test_headers.py @@ -1,14 +1,16 @@ from http.cookiejar import Cookie from unittest import TestCase -from httpobs.scanner.analyzer.headers import (content_security_policy, - cookies, - public_key_pinning, - referrer_policy, - strict_transport_security, - x_content_type_options, - x_frame_options, - x_xss_protection) +from httpobs.scanner.analyzer.headers import ( + content_security_policy, + cookies, + public_key_pinning, + referrer_policy, + strict_transport_security, + x_content_type_options, + x_frame_options, + x_xss_protection, +) from httpobs.tests.utils import empty_requests, set_header @@ -115,8 +117,7 @@ def test_unsafe_inline(self): def test_unsafe_eval(self): reqs = empty_requests() - set_header(reqs['responses']['auto'], 'Content-Security-Policy', - "default-src 'none'; script-src 'unsafe-eval'") + set_header(reqs['responses']['auto'], 'Content-Security-Policy', "default-src 'none'; script-src 'unsafe-eval'") result = content_security_policy(reqs) @@ -126,16 +127,18 @@ def test_unsafe_eval(self): self.assertTrue(result['policy']['unsafeEval']) def test_unsafe_inline_in_style_src_only(self): - values = ("object-src 'none'; script-src 'none'; style-src 'unsafe-inline'", - "default-src 'none'; script-src https://mozilla.org; style-src 'unsafe-inline'", - "default-src 'unsafe-inline'; script-src https://mozilla.org", - "default-src 'none';;; ;;;style-src 'self' 'unsafe-inline'", - "default-src 'none'; style-src data:", - "default-src 'none'; style-src *", - "default-src 'none'; style-src https:", - "default-src 'none'; style-src 'unsafe-inline'; " + - "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' " + - "'unsafe-inline'") + values = ( + "object-src 'none'; script-src 'none'; style-src 'unsafe-inline'", + "default-src 'none'; script-src https://mozilla.org; style-src 'unsafe-inline'", + "default-src 'unsafe-inline'; script-src https://mozilla.org", + "default-src 'none';;; ;;;style-src 'self' 'unsafe-inline'", + "default-src 'none'; style-src data:", + "default-src 'none'; style-src *", + "default-src 'none'; style-src https:", + "default-src 'none'; style-src 'unsafe-inline'; " + + "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' " + + "'unsafe-inline'", + ) for value in values: reqs = empty_requests() @@ -150,20 +153,21 @@ def test_unsafe_inline_in_style_src_only(self): def test_no_unsafe(self): # See https://github.com/mozilla/http-observatory/issues/88 and # https://github.com/mozilla/http-observatory/issues/277 for 'unsafe-inline' + hash/nonce - values = ("default-src https://mozilla.org", - "default-src https://mozilla.org;;; ;;;script-src 'none'", - "object-src 'none'; script-src https://mozilla.org; " + - "style-src https://mozilla.org; upgrade-insecure-requests;", - "object-src 'none'; script-src 'strict-dynamic' 'nonce-abc' 'unsafe-inline'; style-src 'none'", - "object-src 'none'; style-src 'self';" + - "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='", - "object-src 'none'; style-src 'self'; script-src 'unsafe-inline' " + - "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='" + - "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB='", - "object-src 'none'; script-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'; style-src 'none'", - "default-src https://mozilla.org; style-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'", - "default-src https://mozilla.org; style-src 'unsafe-inline' " + - 
"'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' 'unsafe-inline'") + values = ( + "default-src https://mozilla.org", + "default-src https://mozilla.org;;; ;;;script-src 'none'", + "object-src 'none'; script-src https://mozilla.org; " + + "style-src https://mozilla.org; upgrade-insecure-requests;", + "object-src 'none'; script-src 'strict-dynamic' 'nonce-abc' 'unsafe-inline'; style-src 'none'", + "object-src 'none'; style-src 'self';" + "script-src 'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='", + "object-src 'none'; style-src 'self'; script-src 'unsafe-inline' " + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBA='" + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB='", + "object-src 'none'; script-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'; style-src 'none'", + "default-src https://mozilla.org; style-src 'unsafe-inline' 'nonce-abc123' 'unsafe-inline'", + "default-src https://mozilla.org; style-src 'unsafe-inline' " + + "'sha256-hqBEA/HXB3aJU2FgOnYN8rkAgEVgyfi3Vs1j2/XMPBB=' 'unsafe-inline'", + ) for value in values: reqs = empty_requests() @@ -175,7 +179,6 @@ def test_no_unsafe(self): self.assertTrue(result['pass']) def test_no_unsafe_default_src_none(self): - # An HTTP header (default-src http:) and HTTP equiv (default-src https:), with differing values # that should end up as default-src 'none' reqs = empty_requests('test_parse_http_equiv_headers_csp2.html') @@ -189,8 +192,8 @@ def test_no_unsafe_default_src_none(self): values = ( "default-src", # no value == 'none' TODO: Fix this "default-src 'none'; script-src 'strict-dynamic' 'nonce-abc123' 'unsafe-inline'", - "default-src 'none'; script-src https://mozilla.org;" + - "style-src https://mozilla.org; upgrade-insecure-requests;", + "default-src 'none'; script-src https://mozilla.org;" + + "style-src https://mozilla.org; upgrade-insecure-requests;", "default-src 'none'; object-src https://mozilla.org", ) @@ -368,82 +371,90 @@ def test_missing(self): def test_secure_with_httponly_sessions(self): # Python cookies are the literal worst, seriously, the worst - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='foo', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) # See: 
https://github.com/mozilla/http-observatory/issues/121 for the __cfduid insanity - cookie = Cookie(name='__cfduid', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rest={}, - rfc2109=False, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='__cfduid', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rest={}, + rfc2109=False, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) # See: https://github.com/mozilla/http-observatory/issues/282 for the heroku-session-affinity insanity - cookie = Cookie(name='heroku-session-affinity', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rest={}, - rfc2109=False, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='heroku-session-affinity', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rest={}, + rfc2109=False, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -453,239 +464,266 @@ def test_secure_with_httponly_sessions(self): self.assertFalse(result['sameSite']) def test_secure_with_httponly_sessions_and_samesite(self): - cookie = Cookie(name='SESSIONID_SAMESITE_STRICT', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Strict'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_STRICT', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Strict'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_LAX', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Lax'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_LAX', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Lax'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_NONE', - comment=None, - 
comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'None'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_NONE', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'None'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) self.assertEquals('cookies-secure-with-httponly-sessions-and-samesite', result['result']) - self.assertEquals({ - 'SESSIONID_SAMESITE_STRICT': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Strict', - 'secure': True}, - 'SESSIONID_SAMESITE_LAX': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Lax', - 'secure': True}, - 'SESSIONID_SAMESITE_NONE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'None', - 'secure': True} - }, - result['data']) + self.assertEquals( + { + 'SESSIONID_SAMESITE_STRICT': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Strict', + 'secure': True, + }, + 'SESSIONID_SAMESITE_LAX': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Lax', + 'secure': True, + }, + 'SESSIONID_SAMESITE_NONE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'None', + 'secure': True, + }, + }, + result['data'], + ) self.assertTrue(result['pass']) self.assertTrue(result['sameSite']) def test_secure_with_httponly_sessions_and_samesite_not_awarded_if_not_all_cookies_samesite(self): - cookie = Cookie(name='SESSIONID_SAMESITE_STRICT', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Strict'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_STRICT', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Strict'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_NO_SAMESITE', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_NO_SAMESITE', + comment=None, + comment_url=None, + discard=False, + 
domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_LAX', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Lax'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_LAX', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Lax'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) - cookie = Cookie(name='SESSIONID_SAMESITE_NONE', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'None'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID_SAMESITE_NONE', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'None'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) self.assertEquals('cookies-secure-with-httponly-sessions', result['result']) - self.assertEquals({ - 'SESSIONID_SAMESITE_STRICT': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Strict', - 'secure': True}, - 'SESSIONID_NO_SAMESITE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': False, - 'secure': True}, - 'SESSIONID_SAMESITE_LAX': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'Lax', - 'secure': True}, - 'SESSIONID_SAMESITE_NONE': { - 'domain': 'mozilla.com', - 'expires': None, - 'httponly': True, - 'max-age': None, - 'path': '/', - 'port': 443, - 'samesite': 'None', - 'secure': True} - }, - result['data']) + self.assertEquals( + { + 'SESSIONID_SAMESITE_STRICT': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Strict', + 'secure': True, + }, + 'SESSIONID_NO_SAMESITE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': False, + 'secure': True, + }, + 'SESSIONID_SAMESITE_LAX': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': '/', + 'port': 443, + 'samesite': 'Lax', + 'secure': True, + }, + 'SESSIONID_SAMESITE_NONE': { + 'domain': 'mozilla.com', + 'expires': None, + 'httponly': True, + 'max-age': None, + 'path': 
'/', + 'port': 443, + 'samesite': 'None', + 'secure': True, + }, + }, + result['data'], + ) self.assertTrue(result['pass']) self.assertFalse(result['sameSite']) def test_anticsrf_without_samesite(self): - cookie = Cookie(name='CSRFTOKEN', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='CSRFTOKEN', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -695,23 +733,25 @@ def test_anticsrf_without_samesite(self): self.assertFalse(result['sameSite']) def test_samesite_invalid_empty(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': None}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': None}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -721,23 +761,25 @@ def test_samesite_invalid_empty(self): self.assertIsNone(result['sameSite']) def test_samesite_invalid_true(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': True}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': True}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -747,23 +789,25 @@ def test_samesite_invalid_true(self): self.assertIsNone(result['sameSite']) def test_samesite_invalid(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True, 'SameSite': 'Invalid'}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', 
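# [editor's note: annotation, not part of the patch] The cookie tests in this
# file repeatedly build http.cookiejar's 17-field Cookie constructor, which is
# why this reformatting hunk runs so long. A self-contained sketch of the
# pattern, kept in comment form here; the helper and its values are
# illustrative, not part of this repo:
#
#     from http.cookiejar import Cookie, CookieJar
#
#     def make_cookie(name, secure=True, httponly=True, samesite=None):
#         rest = {}  # non-standard attrs (HttpOnly, SameSite) go in `rest`
#         if httponly:
#             rest['HttpOnly'] = True
#         if samesite is not None:
#             rest['SameSite'] = samesite
#         return Cookie(
#             version=1, name=name, value='bar',
#             port=443, port_specified=True,
#             domain='example.com', domain_specified=True,
#             domain_initial_dot=False,
#             path='/', path_specified=True,
#             secure=secure, expires=None, discard=False,
#             comment=None, comment_url=None, rest=rest, rfc2109=False,
#         )
#
#     jar = CookieJar()
#     jar.set_cookie(make_cookie('SESSIONID', samesite='Lax'))
#     for c in jar:
#         print(c.name, c.secure, c.has_nonstandard_attr('HttpOnly'))
# [end editor's note]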
+ expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True, 'SameSite': 'Invalid'}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -773,23 +817,25 @@ def test_samesite_invalid(self): self.assertIsNone(result['sameSite']) def test_regular_cookie_no_secure_but_hsts(self): - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='foo', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) self.reqs['responses']['https'].headers['Strict-Transport-Security'] = 'max-age=15768000' @@ -800,23 +846,25 @@ def test_regular_cookie_no_secure_but_hsts(self): self.assertFalse(result['sameSite']) def test_session_cookie_no_secure_but_hsts(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) self.reqs['responses']['https'].headers['Strict-Transport-Security'] = 'max-age=15768000' @@ -827,23 +875,25 @@ def test_session_cookie_no_secure_but_hsts(self): self.assertFalse(result['sameSite']) def test_no_secure(self): - cookie = Cookie(name='foo', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='foo', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -853,23 +903,25 @@ def test_no_secure(self): self.assertFalse(result['sameSite']) def test_session_no_httponly(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=True, - version=1, - value='bar') + cookie = Cookie( + 
name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=True, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -879,23 +931,25 @@ def test_session_no_httponly(self): self.assertFalse(result['sameSite']) def test_session_no_secure(self): - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={'HttpOnly': True}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={'HttpOnly': True}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -905,23 +959,25 @@ def test_session_no_secure(self): self.assertFalse(result['sameSite']) # https://github.com/mozilla/http-observatory/issues/97 - cookie = Cookie(name='SESSIONID', - comment=None, - comment_url=None, - discard=False, - domain='mozilla.com', - domain_initial_dot=False, - domain_specified='mozilla.com', - expires=None, - path='/', - path_specified='/', - port=443, - port_specified=443, - rfc2109=False, - rest={}, - secure=False, - version=1, - value='bar') + cookie = Cookie( + name='SESSIONID', + comment=None, + comment_url=None, + discard=False, + domain='mozilla.com', + domain_initial_dot=False, + domain_specified='mozilla.com', + expires=None, + path='/', + path_specified='/', + port=443, + port_specified=443, + rfc2109=False, + rest={}, + secure=False, + version=1, + value='bar', + ) self.reqs['session'].cookies.set_cookie(cookie) result = cookies(self.reqs) @@ -958,7 +1014,8 @@ def test_header_invalid(self): self.reqs['responses']['https'].headers['Public-Key-Pins'] = ( 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) self.assertEquals('hpkp-header-invalid', result['result']) @@ -970,7 +1027,8 @@ def test_header_invalid(self): self.reqs['responses']['https'].headers['Public-Key-Pins'] = ( 'max-age=15768000; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -995,7 +1053,8 @@ def test_invalid_cert(self): 'includeSubDomains; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) self.reqs['responses']['https'].verified = False result = public_key_pinning(self.reqs) @@ -1008,7 +1067,8 @@ def test_max_age_too_low(self): 'max-age=86400; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 
'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -1021,7 +1081,8 @@ def test_implemented(self): 'includeSubDomains; ' 'pin-sha256="E9CZ9INDbd+2eRQozYqqbQ2yXLVKB9+xcprMF+44U1g="; ' 'pin-sha256="LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ="; ' - 'report-uri="http://example.com/pkp-report"') + 'report-uri="http://example.com/pkp-report"' + ) result = public_key_pinning(self.reqs) @@ -1069,11 +1130,13 @@ def tearDown(self): self.reqs = None def test_header_private(self): - for policy in ['no-referrer', - 'same-origin', - 'strict-origin', - 'STRICT-ORIGIN', - 'strict-origin-when-cross-origin']: + for policy in [ + 'no-referrer', + 'same-origin', + 'strict-origin', + 'STRICT-ORIGIN', + 'strict-origin-when-cross-origin', + ]: self.reqs['responses']['auto'].headers['Referrer-Policy'] = policy result = referrer_policy(self.reqs) @@ -1133,8 +1196,10 @@ def test_header_unsafe(self): self.assertFalse(result['pass']) def test_multiple_value_header_all_valid(self): - valid_but_unsafe_policies = ['origin-when-cross-origin, no-referrer, unsafe-url', # safe in the middle - 'no-referrer, unsafe-url'] # safe at the beginning + valid_but_unsafe_policies = [ + 'origin-when-cross-origin, no-referrer, unsafe-url', # safe in the middle + 'no-referrer, unsafe-url', + ] # safe at the beginning for policy in valid_but_unsafe_policies: self.reqs['responses']['auto'].headers['Referrer-Policy'] = policy @@ -1182,8 +1247,9 @@ def test_header_invalid(self): self.assertFalse(result['pass']) # If the header is set twice - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains, max-age=15768000; includeSubDomains' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains, max-age=15768000; includeSubDomains' result = strict_transport_security(self.reqs) @@ -1201,8 +1267,9 @@ def test_no_https(self): self.assertFalse(result['pass']) def test_invalid_cert(self): - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains; preload' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains; preload' self.reqs['responses']['https'].verified = False result = strict_transport_security(self.reqs) @@ -1219,8 +1286,9 @@ def test_max_age_too_low(self): self.assertFalse(result['pass']) def test_implemented(self): - self.reqs['responses']['https'].headers['Strict-Transport-Security'] = \ - 'max-age=15768000; includeSubDomains; preload' + self.reqs['responses']['https'].headers[ + 'Strict-Transport-Security' + ] = 'max-age=15768000; includeSubDomains; preload' result = strict_transport_security(self.reqs) @@ -1364,10 +1432,7 @@ def test_missing(self): self.assertFalse(result['pass']) def test_header_invalid(self): - for value in ('whimsy', - '2; mode=block', - '1; mode=block; mode=block', - '1; mode=block, 1; mode=block'): + for value in ('whimsy', '2; mode=block', '1; mode=block; mode=block', '1; mode=block, 1; mode=block'): self.reqs['responses']['auto'].headers['X-XSS-Protection'] = value result = x_xss_protection(self.reqs) diff --git a/httpobs/tests/unittests/test_misc.py b/httpobs/tests/unittests/test_misc.py index 61b1072a..4a6ef6ef 100644 --- a/httpobs/tests/unittests/test_misc.py +++ b/httpobs/tests/unittests/test_misc.py @@ -46,8 +46,9 @@ def test_acao_restricted_with_acao(self): def 
test_acao_universal_with_acao(self): self.reqs['responses']['cors'].request.headers['Origin'] = 'https://http-observatory.security.mozilla.org' - self.reqs['responses']['cors'].headers['Access-Control-Allow-Origin'] = \ - 'https://http-observatory.security.mozilla.org' + self.reqs['responses']['cors'].headers[ + 'Access-Control-Allow-Origin' + ] = 'https://http-observatory.security.mozilla.org' self.reqs['responses']['cors'].headers['Access-Control-Allow-Credentials'] = 'true' result = cross_origin_resource_sharing(self.reqs) @@ -56,7 +57,9 @@ def test_acao_universal_with_acao(self): self.assertFalse(result['pass']) def test_acao_restricted_with_crossdomain(self): - self.reqs['resources']['/crossdomain.xml'] = """ + self.reqs['resources'][ + '/crossdomain.xml' + ] = """ @@ -69,7 +72,9 @@ def test_acao_restricted_with_crossdomain(self): self.assertTrue(result['pass']) def test_acao_universal_with_crossdomain(self): - self.reqs['resources']['/crossdomain.xml'] = """ + self.reqs['resources'][ + '/crossdomain.xml' + ] = """ """ @@ -80,7 +85,9 @@ def test_acao_universal_with_crossdomain(self): self.assertFalse(result['pass']) def test_acao_restricted_with_clientaccess(self): - self.reqs['resources']['/clientaccesspolicy.xml'] = """ + self.reqs['resources'][ + '/clientaccesspolicy.xml' + ] = """ @@ -95,12 +102,13 @@ def test_acao_restricted_with_clientaccess(self): result = cross_origin_resource_sharing(self.reqs) self.assertEquals('cross-origin-resource-sharing-implemented-with-restricted-access', result['result']) - self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'], - result['data']['clientaccesspolicy']) + self.assertEquals(['http-observatory.security.mozilla.org', 'github.com'], result['data']['clientaccesspolicy']) self.assertTrue(result['pass']) def test_acao_universal_with_clientaccess(self): - self.reqs['resources']['/clientaccesspolicy.xml'] = """ + self.reqs['resources'][ + '/clientaccesspolicy.xml' + ] = """ @@ -164,8 +172,10 @@ def test_redirects_to_https(self): result = redirection(self.reqs) self.assertEquals('redirection-to-https', result['result']) - self.assertEquals(['http://http-observatory.security.mozilla.org/', - 'https://http-observatory.security.mozilla.org/'], result['route']) + self.assertEquals( + ['http://http-observatory.security.mozilla.org/', 'https://http-observatory.security.mozilla.org/'], + result['route'], + ) self.assertTrue(result['pass']) def test_redirects_to_https_with_port_number(self): @@ -181,8 +191,10 @@ def test_redirects_to_https_with_port_number(self): result = redirection(self.reqs) self.assertEquals('redirection-to-https', result['result']) - self.assertEquals(['http://http-observatory.security.mozilla.org/', - 'https://http-observatory.security.mozilla.org:443/'], result['route']) + self.assertEquals( + ['http://http-observatory.security.mozilla.org/', 'https://http-observatory.security.mozilla.org:443/'], + result['route'], + ) self.assertTrue(result['pass']) def test_redirects_invalid_cert(self): @@ -237,10 +249,12 @@ def test_first_redirection_off_host(self): def test_all_redirections_preloaded(self): self.reqs['responses']['http'].url = 'https://www.pokeinthe.io/foo/bar' - for url in ('http://pokeinthe.io/', - 'https://pokeinthe.io/', - 'https://www.pokeinthe.io/', - 'https://baz.pokeinthe.io/foo'): + for url in ( + 'http://pokeinthe.io/', + 'https://pokeinthe.io/', + 'https://www.pokeinthe.io/', + 'https://baz.pokeinthe.io/foo', + ): history = UserDict() history.request = UserDict() history.request.url = url diff 
--git a/httpobs/tests/unittests/test_parse_http_equiv_headers.py b/httpobs/tests/unittests/test_parse_http_equiv_headers.py index ba27d2ac..9e0dbc74 100644 --- a/httpobs/tests/unittests/test_parse_http_equiv_headers.py +++ b/httpobs/tests/unittests/test_parse_http_equiv_headers.py @@ -13,8 +13,7 @@ def tearDown(self): def test_header_match(self): reqs = empty_requests('test_parse_http_equiv_headers_csp1.html') - self.assertEquals(reqs['responses']['auto'].http_equiv, - {'Content-Security-Policy': ['default-src \'none\';']}) + self.assertEquals(reqs['responses']['auto'].http_equiv, {'Content-Security-Policy': ['default-src \'none\';']}) def test_header_case_insensitivity(self): reqs = empty_requests('test_parse_http_equiv_headers_csp1.html') @@ -25,9 +24,12 @@ def test_header_case_insensitivity(self): def test_multiple_http_equivs(self): reqs = empty_requests('test_parse_http_equiv_headers_csp_multiple_http_equiv1.html') - self.assertEquals(reqs['responses']['auto'].http_equiv['Content-Security-Policy'], [ - "default-src 'none'; object-src 'none'; media-src 'none';", - "connect-src 'self'; font-src 'self'; child-src 'self'", - "img-src 'self'; style-src 'self' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ", - "script-src 'self' 'unsafe-inline' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ'", - ]) + self.assertEquals( + reqs['responses']['auto'].http_equiv['Content-Security-Policy'], + [ + "default-src 'none'; object-src 'none'; media-src 'none';", + "connect-src 'self'; font-src 'self'; child-src 'self'", + "img-src 'self'; style-src 'self' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ", + "script-src 'self' 'unsafe-inline' 'nonce-gAeQO8jI4VJCsrsXkcUVRCzQjiihKteQ'", + ], + ) diff --git a/httpobs/tests/unittests/test_retriever.py b/httpobs/tests/unittests/test_retriever.py index 1df50d0c..bbfb79d3 100644 --- a/httpobs/tests/unittests/test_retriever.py +++ b/httpobs/tests/unittests/test_retriever.py @@ -58,5 +58,5 @@ def test_multiple_csp_headers_in_http(self): self.assertEquals( get_duplicate_header_values(reqs['responses']['auto'], 'Content-Security-Policy'), - ["script-src 'unsafe-inline'", 'img-src https://google.com'] + ["script-src 'unsafe-inline'", 'img-src https://google.com'], ) diff --git a/httpobs/tests/unittests/test_sanitize_headers.py b/httpobs/tests/unittests/test_sanitize_headers.py index 61d7c104..0e337d97 100644 --- a/httpobs/tests/unittests/test_sanitize_headers.py +++ b/httpobs/tests/unittests/test_sanitize_headers.py @@ -6,17 +6,11 @@ class TestValidHostname(TestCase): def test_valid_size_headers(self): # TODO: Try to find a site with www.site.foo but not site.foo - headers = { - 'Content-Type': 'text/html', - 'Location': '/whatever' - } + headers = {'Content-Type': 'text/html', 'Location': '/whatever'} self.assertEquals(headers, sanitize_headers(headers)) def test_huge_headers(self): - headers = { - 'Content-Type': 'text/html', - 'Location': '/whatever' * 10000 - } + headers = {'Content-Type': 'text/html', 'Location': '/whatever' * 10000} self.assertIsNone(sanitize_headers(headers)) diff --git a/httpobs/website/__init__.py b/httpobs/website/__init__.py index ea91afc4..dc2d3f42 100644 --- a/httpobs/website/__init__.py +++ b/httpobs/website/__init__.py @@ -1,4 +1,3 @@ from httpobs.website.decorators import add_response_headers, sanitized_api_response -__all__ = ['add_response_headers', - 'sanitized_api_response'] +__all__ = ['add_response_headers', 'sanitized_api_response'] diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 87fdcac7..f89a8952 100644 --- 
a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -17,6 +17,7 @@ # TODO: Implement API to write public and private headers to the database + @api.route('/api/v1/analyze', methods=['GET', 'OPTIONS', 'POST']) @add_response_headers(cors=True) @sanitized_api_response @@ -69,14 +70,15 @@ def api_post_scan_hostname(): return { 'error': 'recent-scan-not-found', 'text': 'Recently completed scan for {hostname} not found'.format( - hostname=request.args.get('host', '')) + hostname=request.args.get('host', '') + ), } # If there was a rescan attempt and it returned a row, it's because the rescan was done within the cooldown window elif rescan and request.method == 'POST': return { 'error': 'rescan-attempt-too-soon', - 'text': '{hostname} is on temporary cooldown'.format(hostname=request.args.get('host', '')) + 'text': '{hostname} is on temporary cooldown'.format(hostname=request.args.get('host', '')), } # Return the scan row @@ -120,8 +122,9 @@ def api_get_host_history(): return jsonify({'error': 'No history found'}) # Prune for when the score doesn't change; thanks to chuck for the elegant list comprehension - pruned_history = [v for k, v in enumerate(history) if history[k].get('score') is not history[k - 1].get('score') or - k == 0] + pruned_history = [ + v for k, v in enumerate(history) if history[k].get('score') is not history[k - 1].get('score') or k == 0 + ] # Return the host history return jsonify(pruned_history) @@ -142,9 +145,9 @@ def api_get_recent_scans(): except ValueError: return {'error': 'invalid-parameters'} - return jsonify(database.select_scan_recent_finished_scans(num_scans=num_scans, - min_score=min_score, - max_score=max_score)) + return jsonify( + database.select_scan_recent_finished_scans(num_scans=num_scans, min_score=min_score, max_score=max_score) + ) # TODO: Deprecate @@ -185,31 +188,38 @@ def api_get_scanner_stats(): stats['most_recent_scan_datetime'] = http_date(stats['most_recent_scan_datetime'].utctimetuple()) stats['recent_scans'] = {http_date(i.utctimetuple()): v for i, v in stats['recent_scans']} - resp = make_response(json.dumps({ - 'gradeDistribution': { - 'latest': grade_distribution, - 'all': grade_distribution_all_scans, - }, - 'gradeImprovements': grade_improvements, - 'misc': { - 'mostRecentScanDate': stats['most_recent_scan_datetime'], - 'numHoursWithoutScansInLast24Hours': 24 - len(stats['recent_scans']) if verbose else -1, - 'numImprovedSites': sum([v for k, v in grade_improvements_all.items() if k > 0]), - 'numScans': stats['scan_count'], - 'numScansLast24Hours': sum(stats['recent_scans'].values()) if verbose else -1, - 'numSuccessfulScans': sum(grade_distribution_all_scans.values()), - 'numUniqueSites': sum(grade_improvements_all.values()) - }, - 'recent': { - 'scans': { - 'best': database.select_scan_recent_finished_scans(13, 90, 1000), # 13, as there are 13 grades - 'recent': database.select_scan_recent_finished_scans(13, 0, 1000), # 13, as there are 13 grades - 'worst': database.select_scan_recent_finished_scans(13, 0, 20), # 13, as there are 13 grades - 'numPerHourLast24Hours': stats['recent_scans'], + resp = make_response( + json.dumps( + { + 'gradeDistribution': { + 'latest': grade_distribution, + 'all': grade_distribution_all_scans, + }, + 'gradeImprovements': grade_improvements, + 'misc': { + 'mostRecentScanDate': stats['most_recent_scan_datetime'], + 'numHoursWithoutScansInLast24Hours': 24 - len(stats['recent_scans']) if verbose else -1, + 'numImprovedSites': sum([v for k, v in grade_improvements_all.items() if k > 0]), + 'numScans': 
stats['scan_count'], + 'numScansLast24Hours': sum(stats['recent_scans'].values()) if verbose else -1, + 'numSuccessfulScans': sum(grade_distribution_all_scans.values()), + 'numUniqueSites': sum(grade_improvements_all.values()), + }, + 'recent': { + 'scans': { + 'best': database.select_scan_recent_finished_scans(13, 90, 1000), # 13, as there are 13 grades + 'recent': database.select_scan_recent_finished_scans(13, 0, 1000), # 13, as there are 13 grades + 'worst': database.select_scan_recent_finished_scans(13, 0, 20), # 13, as there are 13 grades + 'numPerHourLast24Hours': stats['recent_scans'], + }, + }, + 'states': {state: stats['states'].get(state, 0) for state in STATES}, }, - }, - 'states': {state: stats['states'].get(state, 0) for state in STATES}, - }, indent=4 if pretty else None, sort_keys=pretty, default=str)) + indent=4 if pretty else None, + sort_keys=pretty, + default=str, + ) + ) resp.mimetype = 'application/json' diff --git a/httpobs/website/decorators.py b/httpobs/website/decorators.py index cb879ee0..6bf22825 100644 --- a/httpobs/website/decorators.py +++ b/httpobs/website/decorators.py @@ -14,8 +14,9 @@ def add_response_headers(headers=None, default_headers=None, cors=False): if not default_headers: default_headers = { - 'Content-Security-Policy': ("default-src 'none'; base-uri 'none'; " - "form-action 'none'; frame-ancestors 'none'"), + 'Content-Security-Policy': ( + "default-src 'none'; base-uri 'none'; " "form-action 'none'; frame-ancestors 'none'" + ), 'Referrer-Policy': 'no-referrer', 'Strict-Transport-Security': 'max-age=63072000', 'X-Content-Type-Options': 'nosniff', @@ -35,16 +36,19 @@ def wrapper(*args, **kwargs): # Append the CORS headers if cors: - headers.update({ - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Methods': ', '.join(request.url_rule.methods), - 'Access-Control-Max-Age': '86400', - }) + headers.update( + { + 'Access-Control-Allow-Origin': '*', + 'Access-Control-Allow-Methods': ', '.join(request.url_rule.methods), + 'Access-Control-Max-Age': '86400', + } + ) # Append the headers to the response for header, value in headers.items(): resp.headers[header] = value return resp + return wrapper return decorator @@ -55,11 +59,34 @@ def sanitized_api_response(fn): def wrapper(*args, **kwargs): output = fn(*args, **kwargs) - SCAN_VALID_KEYS = ('algorithm_version', 'end_time', 'error', 'grade', 'hidden', 'likelihood_indicator', - 'response_headers', 'scan_id', 'score', 'start_time', 'state', 'status_code', - 'tests_completed', 'tests_failed', 'tests_passed', 'tests_quantity') - TEST_RESULT_VALID_KEYS = ('error', 'expectation', 'name', 'output', 'pass', 'result', - 'score_description', 'score_modifier') + SCAN_VALID_KEYS = ( + 'algorithm_version', + 'end_time', + 'error', + 'grade', + 'hidden', + 'likelihood_indicator', + 'response_headers', + 'scan_id', + 'score', + 'start_time', + 'state', + 'status_code', + 'tests_completed', + 'tests_failed', + 'tests_passed', + 'tests_quantity', + ) + TEST_RESULT_VALID_KEYS = ( + 'error', + 'expectation', + 'name', + 'output', + 'pass', + 'result', + 'score_description', + 'score_modifier', + ) # Convert it to a dict (in case it's a DictRow) output = dict(output) @@ -81,4 +108,5 @@ def wrapper(*args, **kwargs): output[test] = {k: output[test][k] for k in output[test] if k in TEST_RESULT_VALID_KEYS} return jsonify(output) + return wrapper diff --git a/httpobs/website/main.py b/httpobs/website/main.py index d253d6f4..4481411a 100644 --- a/httpobs/website/main.py +++ b/httpobs/website/main.py @@ -27,5 +27,4 @@ 
def main() -> str: if __name__ == '__main__': - app.run(debug=DEVELOPMENT_MODE, - port=API_PORT) + app.run(debug=DEVELOPMENT_MODE, port=API_PORT) diff --git a/httpobs/website/monitoring.py b/httpobs/website/monitoring.py index 963d8a12..1358c29a 100644 --- a/httpobs/website/monitoring.py +++ b/httpobs/website/monitoring.py @@ -27,5 +27,4 @@ def lbheartbeat(): @monitoring_api.route('/__version__') def version(): - return jsonify({'source': SOURCE_URL, - 'version': VERSION}) + return jsonify({'source': SOURCE_URL, 'version': VERSION}) diff --git a/setup.py b/setup.py index a605959b..921668e1 100644 --- a/setup.py +++ b/setup.py @@ -61,10 +61,12 @@ 'publicsuffixlist==0.7.12', 'requests==2.27.1', 'Flask==1.0.2', - 'uWSGI==2.0.17.1' + 'uWSGI==2.0.17.1', + ], + scripts=[ + 'httpobs/scripts/httpobs-local-scan', + 'httpobs/scripts/httpobs-mass-scan', + 'httpobs/scripts/httpobs-scan-worker', ], - scripts=['httpobs/scripts/httpobs-local-scan', - 'httpobs/scripts/httpobs-mass-scan', - 'httpobs/scripts/httpobs-scan-worker'], zip_safe=False, ) From e4ae9ab4643fc88d259a6f183dab4e749b01ee79 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 18:03:19 +0000 Subject: [PATCH 08/31] add isort formatter --- .pre-commit-config.yaml | 4 ++++ poetry.lock | 16 +++++++++++++++- pyproject.toml | 6 ++++++ 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b855a7fa..67b0c942 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,6 +8,10 @@ repos: - id: end-of-file-fixer - id: check-yaml - id: check-added-large-files + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort - repo: https://github.com/psf/black-pre-commit-mirror rev: 23.12.1 hooks: diff --git a/poetry.lock b/poetry.lock index 43092451..945e3cc6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -526,6 +526,20 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
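# [editor's note: annotation, not part of the patch] The pre-commit hook added
# above runs isort over staged files; the same sort is also reproducible from
# Python via isort's public API. A sketch using the profile and line length
# this series configures in pyproject.toml (the sample import block is
# illustrative, not from this repo):
#
#     import isort
#
#     messy = "from os import environ, cpu_count\nimport sys\nimport configparser\n"
#     print(isort.code(messy, profile="black", line_length=120))
#
# This prints the plain imports first, alphabetized, then the from-import
# with its names sorted: configparser, sys, `from os import cpu_count, environ`.
# [end editor's note]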
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + [[package]] name = "itsdangerous" version = "2.1.2" @@ -1115,4 +1129,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "0e91d0f8f44fe626d8a2b25316aa26aaeeb58792af6602245b7f0183c74f8457" +content-hash = "ecf08c0bda33050ffd997762e7100b24982c93006fb1f94f703be7f372bb96c4" diff --git a/pyproject.toml b/pyproject.toml index d0a9ea36..6150e701 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,6 +24,7 @@ Flask = "^3.0.0" uWSGI = "^2.0.22" pre-commit = "^3.6.0" black = "^23.12.1" +isort = "^5.13.2" [build-system] requires = ["poetry-core"] @@ -34,3 +35,8 @@ target-version = ["py311"] required-version = "23" line-length = 120 skip-string-normalization = true + +[tool.isort] +profile = "black" +skip_gitignore = true +line_length = 120 From 73b2a811edc4f366516c3a63da73fc5e769761e7 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 18:05:17 +0000 Subject: [PATCH 09/31] format all files with isort --- httpobs/conf/__init__.py | 4 +--- httpobs/database/database.py | 14 +++++++------- httpobs/scanner/analyzer/__init__.py | 2 +- httpobs/scanner/analyzer/content.py | 8 +++----- httpobs/scanner/analyzer/headers.py | 1 - httpobs/scanner/analyzer/misc.py | 3 ++- httpobs/scanner/analyzer/utils.py | 1 - httpobs/scanner/celeryconfig.py | 1 - httpobs/scanner/grader/__init__.py | 7 +++---- httpobs/scanner/local.py | 1 - httpobs/scanner/main.py | 12 ++++++------ httpobs/scanner/retriever/retriever.py | 11 +++++------ httpobs/scanner/tasks.py | 7 +++---- httpobs/scanner/utils.py | 4 ++-- httpobs/scripts/httpobs-local-scan | 4 +--- httpobs/scripts/httpobs-mass-scan | 9 +++++---- httpobs/tests/unittests/test_retriever.py | 4 ++-- httpobs/tests/utils.py | 6 +++--- httpobs/website/api.py | 15 +++++++-------- httpobs/website/decorators.py | 3 ++- httpobs/website/main.py | 2 +- httpobs/website/monitoring.py | 3 +-- setup.py | 4 ++-- 23 files changed, 57 insertions(+), 69 deletions(-) diff --git a/httpobs/conf/__init__.py b/httpobs/conf/__init__.py index 2401c9a8..80a02e6c 100644 --- a/httpobs/conf/__init__.py +++ b/httpobs/conf/__init__.py @@ -1,9 +1,7 @@ -from os import environ, cpu_count - import configparser import os.path import sys - +from os import cpu_count, environ # Read in the default config file if /etc/httpobs.conf doesn't already exist __dirname = os.path.abspath(os.path.dirname(__file__)) diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 5b418018..1fd63565 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -1,7 +1,12 @@ +import sys from contextlib import contextmanager from json import dumps -from types import SimpleNamespace from os import getpid +from types import SimpleNamespace + +import psycopg2 +import psycopg2.extras +import psycopg2.pool from httpobs.conf import ( API_CACHED_RESULT_TIME, @@ -23,12 +28,7 @@ STATE_STARTING, ) from httpobs.scanner.analyzer import NUM_TESTS -from httpobs.scanner.grader import get_grade_and_likelihood_for_score, MINIMUM_SCORE_FOR_EXTRA_CREDIT - -import psycopg2 -import psycopg2.extras -import psycopg2.pool -import sys +from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, 
get_grade_and_likelihood_for_score class SimpleDatabaseConnection: diff --git a/httpobs/scanner/analyzer/__init__.py b/httpobs/scanner/analyzer/__init__.py index d3604b92..7f3e6234 100644 --- a/httpobs/scanner/analyzer/__init__.py +++ b/httpobs/scanner/analyzer/__init__.py @@ -6,8 +6,8 @@ referrer_policy, strict_transport_security, x_content_type_options, - x_xss_protection, x_frame_options, + x_xss_protection, ) from .misc import cross_origin_resource_sharing, redirection diff --git a/httpobs/scanner/analyzer/content.py b/httpobs/scanner/analyzer/content.py index 2dfd5592..6b0e060b 100644 --- a/httpobs/scanner/analyzer/content.py +++ b/httpobs/scanner/analyzer/content.py @@ -1,16 +1,14 @@ +import json +from urllib.parse import urlparse + from bs4 import BeautifulSoup as bs from publicsuffixlist import PublicSuffixList -from urllib.parse import urlparse from httpobs.conf import SCANNER_MOZILLA_DOMAINS from httpobs.scanner.analyzer.decorators import scored_test from httpobs.scanner.analyzer.utils import only_if_worse from httpobs.scanner.retriever.retriever import HTML_TYPES - -import json - - # Compat between Python 3.4 and Python 3.5 (see: https://github.com/mozilla/http-observatory-website/issues/14) if not hasattr(json, 'JSONDecodeError'): # pragma: no cover json.JSONDecodeError = ValueError diff --git a/httpobs/scanner/analyzer/headers.py b/httpobs/scanner/analyzer/headers.py index bf9658a3..8886739f 100644 --- a/httpobs/scanner/analyzer/headers.py +++ b/httpobs/scanner/analyzer/headers.py @@ -6,7 +6,6 @@ from httpobs.scanner.analyzer.utils import is_hpkp_preloaded, is_hsts_preloaded, only_if_worse from httpobs.scanner.retriever import get_duplicate_header_values - # Ignore the CloudFlare __cfduid tracking cookies. They *are* actually bad, but it is out of a site's # control. See https://github.com/mozilla/http-observatory/issues/121 for additional details. Hopefully # this will eventually be fixed on CloudFlare's end. 
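Each hunk in this commit applies the import layout enforced by the isort configuration added in the previous commit (profile = "black", line_length = 120): one alphabetized section per origin — standard library, third-party, first-party — separated by a single blank line, with plain `import` statements sorted ahead of `from ... import` statements within each section. A minimal sketch of the resulting order, built only from modules that already appear in the files below:

    # standard library
    import json
    import sys

    # third party: straight imports first, then from-imports
    import requests
    from bs4 import BeautifulSoup as bs

    # first party
    from httpobs.conf import SCANNER_ALLOW_LOCALHOST
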
diff --git a/httpobs/scanner/analyzer/misc.py b/httpobs/scanner/analyzer/misc.py index 65d5914e..009de542 100644 --- a/httpobs/scanner/analyzer/misc.py +++ b/httpobs/scanner/analyzer/misc.py @@ -1,6 +1,7 @@ -from bs4 import BeautifulSoup as bs from urllib.parse import urlparse +from bs4 import BeautifulSoup as bs + from httpobs.scanner.analyzer.decorators import scored_test from httpobs.scanner.analyzer.utils import is_hsts_preloaded diff --git a/httpobs/scanner/analyzer/utils.py b/httpobs/scanner/analyzer/utils.py index 102de2b6..3f20e847 100644 --- a/httpobs/scanner/analyzer/utils.py +++ b/httpobs/scanner/analyzer/utils.py @@ -1,7 +1,6 @@ import json import os.path - # Load the HSTS list from disk __dirname = os.path.abspath(os.path.dirname(__file__)) __filename = os.path.join(__dirname, '..', '..', 'conf', 'hsts-preload.json') diff --git a/httpobs/scanner/celeryconfig.py b/httpobs/scanner/celeryconfig.py index e28dea7d..81b1cc5c 100644 --- a/httpobs/scanner/celeryconfig.py +++ b/httpobs/scanner/celeryconfig.py @@ -1,6 +1,5 @@ from httpobs.conf import BROKER_URL as broker_url - # Set the Celery task queue broker_url = broker_url diff --git a/httpobs/scanner/grader/__init__.py b/httpobs/scanner/grader/__init__.py index e82f9034..2279e29a 100644 --- a/httpobs/scanner/grader/__init__.py +++ b/httpobs/scanner/grader/__init__.py @@ -1,12 +1,11 @@ from .grade import ( - get_score_description, - get_score_modifier, - get_grade_and_likelihood_for_score, GRADES, MINIMUM_SCORE_FOR_EXTRA_CREDIT, + get_grade_and_likelihood_for_score, + get_score_description, + get_score_modifier, ) - __all__ = [ 'get_score_description', 'get_score_modifier', diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index d0de410b..18dcc4c3 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -1,5 +1,4 @@ import httpobs.conf - from httpobs.scanner.analyzer import NUM_TESTS, tests from httpobs.scanner.grader import get_grade_and_likelihood_for_score, get_score_description from httpobs.scanner.retriever import retrieve_all diff --git a/httpobs/scanner/main.py b/httpobs/scanner/main.py index 5152e7f1..00d9b23e 100644 --- a/httpobs/scanner/main.py +++ b/httpobs/scanner/main.py @@ -1,7 +1,13 @@ +import datetime +import subprocess +import sys from random import randrange from time import sleep from urllib.parse import parse_qs, urlparse +import psutil +import redis + from httpobs.conf import ( BROKER_URL, SCANNER_ALLOW_KICKSTART, @@ -17,12 +23,6 @@ from httpobs.database import periodic_maintenance, refresh_materialized_views, update_scans_dequeue_scans from httpobs.scanner.tasks import scan -import datetime -import psutil -import redis -import subprocess -import sys - def main(): # Start each scanner at a random point in the range to spread out database maintenance diff --git a/httpobs/scanner/retriever/retriever.py b/httpobs/scanner/retriever/retriever.py index 2a9c391c..e5c2abd6 100644 --- a/httpobs/scanner/retriever/retriever.py +++ b/httpobs/scanner/retriever/retriever.py @@ -1,18 +1,17 @@ -from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded +import logging from urllib.parse import urlparse -from httpobs.conf import RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_CORS_ORIGIN, RETRIEVER_READ_TIMEOUT, RETRIEVER_USER_AGENT -from httpobs.scanner.utils import parse_http_equiv_headers - -import logging import requests - +from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded # Disable the requests InsecureRequestWarning -- we will track certificate errors manually when # 
verification is disabled. Also disable requests errors at levels lower than CRITICAL, see: # https://github.com/celery/celery/issues/3633 for crashy details from requests.packages.urllib3.exceptions import InsecureRequestWarning +from httpobs.conf import RETRIEVER_CONNECT_TIMEOUT, RETRIEVER_CORS_ORIGIN, RETRIEVER_READ_TIMEOUT, RETRIEVER_USER_AGENT +from httpobs.scanner.utils import parse_http_equiv_headers + requests.packages.urllib3.disable_warnings(InsecureRequestWarning) logging.getLogger('requests').setLevel(logging.CRITICAL) diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index 3151aff1..efc6d68e 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -1,16 +1,15 @@ +import sys + from celery import Celery from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError, WorkerShutdown, WorkerTerminate from httpobs.conf import DEVELOPMENT_MODE from httpobs.database import insert_test_results, select_site_headers, update_scan_state -from httpobs.scanner import celeryconfig, STATE_ABORTED, STATE_FAILED, STATE_RUNNING +from httpobs.scanner import STATE_ABORTED, STATE_FAILED, STATE_RUNNING, celeryconfig from httpobs.scanner.analyzer import tests from httpobs.scanner.retriever import retrieve_all from httpobs.scanner.utils import sanitize_headers -import sys - - # Create the scanner task queue scanner = Celery() scanner.config_from_object(celeryconfig) diff --git a/httpobs/scanner/utils.py b/httpobs/scanner/utils.py index 072ffe48..dcbea95d 100644 --- a/httpobs/scanner/utils.py +++ b/httpobs/scanner/utils.py @@ -1,13 +1,13 @@ import json import os.path -import requests import socket import sys +import requests from bs4 import BeautifulSoup as bs -from httpobs.conf import SCANNER_ALLOW_LOCALHOST, SCANNER_PINNED_DOMAINS from requests.structures import CaseInsensitiveDict +from httpobs.conf import SCANNER_ALLOW_LOCALHOST, SCANNER_PINNED_DOMAINS HSTS_URL = 'https://raw.githubusercontent.com/chromium/chromium/main/net/http/transport_security_state_static.json' diff --git a/httpobs/scripts/httpobs-local-scan b/httpobs/scripts/httpobs-local-scan index 0c40cd00..6193a77e 100755 --- a/httpobs/scripts/httpobs-local-scan +++ b/httpobs/scripts/httpobs-local-scan @@ -1,13 +1,11 @@ #!/usr/bin/env python3 -import httpobs.scanner.local - import argparse import json - from operator import itemgetter from urllib.parse import urlparse +import httpobs.scanner.local if __name__ == "__main__": parser = argparse.ArgumentParser() diff --git a/httpobs/scripts/httpobs-mass-scan b/httpobs/scripts/httpobs-mass-scan index cdabc718..49784362 100755 --- a/httpobs/scripts/httpobs-mass-scan +++ b/httpobs/scripts/httpobs-mass-scan @@ -2,14 +2,15 @@ from __future__ import print_function -from httpobs.conf import API_URL - -import grequests import os -import requests import sys import time +import grequests +import requests + +from httpobs.conf import API_URL + if 'HTTPOBS_DEV' in os.environ: # TODO: use httpobs.conf MAX_QUEUE = 64 else: diff --git a/httpobs/tests/unittests/test_retriever.py b/httpobs/tests/unittests/test_retriever.py index bbfb79d3..a60084dd 100644 --- a/httpobs/tests/unittests/test_retriever.py +++ b/httpobs/tests/unittests/test_retriever.py @@ -1,9 +1,9 @@ import random -import requests import string - from unittest import TestCase +import requests + from httpobs.scanner.retriever import get_duplicate_header_values, retrieve_all from httpobs.tests.utils import empty_requests diff --git a/httpobs/tests/utils.py b/httpobs/tests/utils.py index 
7ba614b4..8e2dcb8e 100644 --- a/httpobs/tests/utils.py +++ b/httpobs/tests/utils.py @@ -1,12 +1,12 @@ +import os.path from collections import UserDict from copy import deepcopy -from requests.cookies import RequestsCookieJar from typing import Union + +from requests.cookies import RequestsCookieJar from urllib3 import HTTPResponse from urllib3._collections import HTTPHeaderDict -import os.path - from httpobs.scanner.utils import parse_http_equiv_headers diff --git a/httpobs/website/api.py b/httpobs/website/api.py index f89a8952..96c67457 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -1,16 +1,15 @@ -from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN -from httpobs.scanner import STATES -from httpobs.scanner.grader import get_score_description, GRADES -from httpobs.scanner.utils import valid_hostname -from httpobs.website import add_response_headers, sanitized_api_response +import json +import os.path from flask import Blueprint, jsonify, make_response, request from werkzeug.http import http_date import httpobs.database as database -import json -import os.path - +from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN +from httpobs.scanner import STATES +from httpobs.scanner.grader import GRADES, get_score_description +from httpobs.scanner.utils import valid_hostname +from httpobs.website import add_response_headers, sanitized_api_response api = Blueprint('api', __name__) diff --git a/httpobs/website/decorators.py b/httpobs/website/decorators.py index 6bf22825..53cccaf1 100644 --- a/httpobs/website/decorators.py +++ b/httpobs/website/decorators.py @@ -1,6 +1,7 @@ -from flask import jsonify, make_response, request from functools import wraps +from flask import jsonify, make_response, request + def add_response_headers(headers=None, default_headers=None, cors=False): """ diff --git a/httpobs/website/main.py b/httpobs/website/main.py index 4481411a..d2af069f 100644 --- a/httpobs/website/main.py +++ b/httpobs/website/main.py @@ -2,7 +2,7 @@ from flask import Flask -from httpobs.conf import DEVELOPMENT_MODE, API_PORT, API_PROPAGATE_EXCEPTIONS +from httpobs.conf import API_PORT, API_PROPAGATE_EXCEPTIONS, DEVELOPMENT_MODE from httpobs.website import add_response_headers from httpobs.website.api import api from httpobs.website.monitoring import monitoring_api diff --git a/httpobs/website/monitoring.py b/httpobs/website/monitoring.py index 1358c29a..8fdfdafc 100644 --- a/httpobs/website/monitoring.py +++ b/httpobs/website/monitoring.py @@ -1,9 +1,8 @@ -from flask import abort, Blueprint, jsonify +from flask import Blueprint, abort, jsonify from httpobs import SOURCE_URL, VERSION from httpobs.database import get_cursor - monitoring_api = Blueprint('monitoring-api', __name__) diff --git a/setup.py b/setup.py index 921668e1..7fcc533c 100644 --- a/setup.py +++ b/setup.py @@ -2,9 +2,9 @@ import os -from httpobs import SOURCE_URL, VERSION -from setuptools import setup, find_packages +from setuptools import find_packages, setup +from httpobs import SOURCE_URL, VERSION __dirname = os.path.abspath(os.path.dirname(__file__)) From 9d49d48893a777ab5fadb576fa232ab305e250f0 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 18:11:56 +0000 Subject: [PATCH 10/31] ignore formatting commits in git blame --- .git-blame-ignore-revs | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .git-blame-ignore-revs diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..8ad038dd --- /dev/null +++ 
b/.git-blame-ignore-revs @@ -0,0 +1,7 @@ +# .git-blame-ignore-revs +# run pre-commit against all files +376413493eca8298b8f2c17c8f02fc0d04285136 +# format all files with black +1583d515c0a767e2b69b3cdcd785008d2dd0cd0b +# format all files with isort +73b2a811edc4f366516c3a63da73fc5e769761e7 From 19e1e9b8631efa23405c401f420f3a88eb9f2ac3 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 18:15:53 +0000 Subject: [PATCH 11/31] run pre-commit in github action --- .github/workflows/test.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 644f2fb3..48c09878 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,12 +28,11 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} + - uses: pre-commit/action@v3.0.0 - name: Install dependencies run: | pip install poetry poetry config virtualenvs.create false poetry install - - name: Run flake8 tests - run: flake8 --config .flake8 httpobs - name: Run nose tests run: nosetests httpobs/tests -e insert_test_result -e scored_test -e select_test_results -e test_retrieve --with-coverage --cover-package=httpobs From 9eef64cb7951d82011d29ddc51a0ef85a478ade4 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 18:20:34 +0000 Subject: [PATCH 12/31] can't use official pre-commit github action --- .github/workflows/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 48c09878..f183da51 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,11 +28,12 @@ jobs: uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - - uses: pre-commit/action@v3.0.0 - name: Install dependencies run: | pip install poetry poetry config virtualenvs.create false poetry install + - name: Run pre-commit against all files + run: pre-commit run --all-files - name: Run nose tests run: nosetests httpobs/tests -e insert_test_result -e scored_test -e select_test_results -e test_retrieve --with-coverage --cover-package=httpobs From 09e32822d37d2786f3a2aecd5e33d78812168df9 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 13:26:59 +0000 Subject: [PATCH 13/31] remove unnecessary files post-poetry migration --- MANIFEST.in | 8 ------ setup.py | 72 ----------------------------------------------------- 2 files changed, 80 deletions(-) delete mode 100644 MANIFEST.in delete mode 100644 setup.py diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index f5145347..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,8 +0,0 @@ -include LICENSE -include MANIFEST.in -include README.md -include httpobs -include httpobs -recursive-include httpobs/conf *.conf *.json *.pem -global-exclude __pycache__ -global-exclude *.py[co] diff --git a/setup.py b/setup.py deleted file mode 100644 index 7fcc533c..00000000 --- a/setup.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python3 - -import os - -from setuptools import find_packages, setup - -from httpobs import SOURCE_URL, VERSION - -__dirname = os.path.abspath(os.path.dirname(__file__)) - -with open(os.path.join(__dirname, 'README.md')) as readme: - README = readme.read() - -setup( - name='httpobs', - version=VERSION, - description='HTTP Observatory: a set of tests and tools to scan your website for basic web hygeine.', - url=SOURCE_URL, - long_description_content_type='text/markdown', - long_description=README, - classifiers=[ - 'Development Status 
:: 4 - Beta', - 'Environment :: Web Environment', - 'Framework :: Flask', - 'Intended Audience :: Developers', - 'Intended Audience :: System Administrators', - 'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)', - 'Natural Language :: English', - 'Programming Language :: Python :: 3 :: Only', - 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', - 'Topic :: Security', - 'Topic :: Software Development :: Quality Assurance', - ], - author='April King', - author_email='april@mozilla.com', - packages=find_packages(), - include_package_data=True, - install_requires=[ - 'amqp==2.3.2', - 'beautifulsoup4==4.6.3', - 'billiard==3.5.0.4', - 'celery==4.2.1', - 'click==7.0', - 'coverage==4.5.2', - 'flake8==3.6.0', - 'httpobs-cli==1.0.2', - 'itsdangerous==1.1.0', - 'kombu==4.2.1', - 'MarkupSafe==1.1.0', - 'mccabe==0.6.1', - 'nose==1.3.7', - 'pep8==1.7.1', - 'pycodestyle==2.4.0', - 'pyflakes==2.0.0', - 'pytz==2018.7', - 'vine==1.1.4', - 'Werkzeug==0.14.1', - 'psycopg2>=2.7,<2.8', - 'redis==2.10.6', - 'psutil==5.9.0', - 'publicsuffixlist==0.7.12', - 'requests==2.27.1', - 'Flask==1.0.2', - 'uWSGI==2.0.17.1', - ], - scripts=[ - 'httpobs/scripts/httpobs-local-scan', - 'httpobs/scripts/httpobs-mass-scan', - 'httpobs/scripts/httpobs-scan-worker', - ], - zip_safe=False, -) From dee6d25e059e099ed5213d8f2e375a8c3336fb85 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 13:49:13 +0000 Subject: [PATCH 14/31] remove separate scanner instance remove celery and related dependencies run scan synchronously in api request --- docker-compose.yml | 23 +- httpobs/database/schema.sql | 29 +- httpobs/database/schema.sql.docker.sql | 1 - httpobs/scanner/celeryconfig.py | 13 - httpobs/scanner/main.py | 217 --------- httpobs/scanner/retriever/retriever.py | 7 - httpobs/scanner/tasks.py | 17 +- httpobs/scripts/httpobs-scan-worker | 40 -- httpobs/website/api.py | 2 + poetry.lock | 640 +++++++------------------ pyproject.toml | 11 +- 11 files changed, 194 insertions(+), 806 deletions(-) delete mode 100644 httpobs/scanner/celeryconfig.py delete mode 100644 httpobs/scanner/main.py delete mode 100755 httpobs/scripts/httpobs-scan-worker diff --git a/docker-compose.yml b/docker-compose.yml index d2b6fef1..e1956520 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,7 +2,7 @@ version: '2' services: website: build: . - command: uwsgi --http :57001 --wsgi-file /app/httpobs/website/main.py --processes 8 --callable app --master + command: uwsgi --http :57001 --wsgi-file /app/httpobs/website/main.py --processes 1 --callable app --master depends_on: - postgres environment: @@ -13,7 +13,7 @@ services: - postgres ports: - "57001:57001" - restart: always + restart: unless-stopped postgres: build: ./httpobs/database @@ -21,22 +21,3 @@ services: - POSTGRES_USER=httpobs - POSTGRES_PASSWORD=totallyfakepassword - POSTGRES_DB=http_observatory - - scanner: - build: . 
- command: /app/httpobs/scripts/httpobs-scan-worker - depends_on: - - postgres - - redis - environment: - - HTTPOBS_BROKER_URL=redis://redis:6379/0 - - HTTPOBS_DATABASE_HOST=postgres - - HTTPOBS_DATABASE_PASS=httpobsscannerpassword - - HTTPOBS_DATABASE_USER=httpobsscanner - - HTTPOBS_MAX_CONCURRENCY=16 - links: - - postgres - - redis - - redis: - image: redis diff --git a/httpobs/database/schema.sql b/httpobs/database/schema.sql index 846858f5..7dd27502 100644 --- a/httpobs/database/schema.sql +++ b/httpobs/database/schema.sql @@ -61,13 +61,6 @@ CREATE INDEX tests_name_idx ON tests (name); CREATE INDEX tests_result_idx ON tests (result); CREATE INDEX tests_pass_idx ON tests (pass); -CREATE USER httpobsscanner; -GRANT SELECT on sites, scans, expectations, tests TO httpobsscanner; -GRANT UPDATE (domain) ON sites to httpobsscanner; /* TODO: there's got to be a better way with SELECT ... FOR UPDATE */ -GRANT UPDATE on scans TO httpobsscanner; -GRANT INSERT on tests TO httpobsscanner; -GRANT USAGE ON SEQUENCE tests_id_seq TO httpobsscanner; - CREATE USER httpobsapi; GRANT SELECT ON expectations, scans, tests to httpobsapi; GRANT SELECT (id, domain, creation_time, public_headers) ON sites TO httpobsapi; @@ -78,6 +71,12 @@ GRANT USAGE ON SEQUENCE sites_id_seq TO httpobsapi; GRANT USAGE ON SEQUENCE scans_id_seq TO httpobsapi; GRANT USAGE ON SEQUENCE expectations_id_seq TO httpobsapi; +GRANT SELECT on sites, scans, expectations, tests TO httpobsapi; +GRANT UPDATE (domain) ON sites to httpobsapi; /* TODO: there's got to be a better way with SELECT ... FOR UPDATE */ +GRANT UPDATE on scans TO httpobsapi; +GRANT INSERT on tests TO httpobsapi; +GRANT USAGE ON SEQUENCE tests_id_seq TO httpobsapi; + CREATE INDEX scans_site_id_finished_state_end_time_idx ON scans (site_id, state, end_time DESC) WHERE state = 'FINISHED'; CREATE MATERIALIZED VIEW latest_scans @@ -129,7 +128,7 @@ ALTER TABLE scans ADD COLUMN likelihood_indicator VARCHAR NULL; /* Update to frequently refresh latest_scans */ /* GRANT SELECT ON latest_scans TO httpobsapi; -ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsscanner; +ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsapi; */ /* Update to add earliest scans and a way to compare earliest and latest */ @@ -163,13 +162,13 @@ CREATE UNIQUE INDEX scan_score_difference_distribution_summation_difference_idx COMMENT ON MATERIALIZED VIEW scan_score_difference_distribution_summation IS 'How many sites have improved by how many points'; GRANT SELECT ON scan_score_difference_distribution_summation TO httpobsapi; -ALTER MATERIALIZED VIEW grade_distribution OWNER TO httpobsscanner; /* so it can refresh */ -ALTER MATERIALIZED VIEW grade_distribution_all_scans OWNER TO httpobsscanner; /* so it can refresh */ -ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsscanner; -ALTER MATERIALIZED VIEW earliest_scans OWNER TO httpobsscanner; -ALTER MATERIALIZED VIEW scan_score_difference_distribution OWNER TO httpobsscanner; -ALTER MATERIALIZED VIEW scan_score_difference_distribution_summation OWNER TO httpobsscanner; -ALTER MATERIALIZED VIEW latest_tests OWNER TO httpobsscanner; +ALTER MATERIALIZED VIEW grade_distribution OWNER TO httpobsapi; /* so it can refresh */ +ALTER MATERIALIZED VIEW grade_distribution_all_scans OWNER TO httpobsapi; /* so it can refresh */ +ALTER MATERIALIZED VIEW latest_scans OWNER TO httpobsapi; +ALTER MATERIALIZED VIEW earliest_scans OWNER TO httpobsapi; +ALTER MATERIALIZED VIEW scan_score_difference_distribution OWNER TO httpobsapi; +ALTER MATERIALIZED VIEW 
scan_score_difference_distribution_summation OWNER TO httpobsapi; +ALTER MATERIALIZED VIEW latest_tests OWNER TO httpobsapi; /* Database updates to allow us to track changes in scoring over time */ /* diff --git a/httpobs/database/schema.sql.docker.sql b/httpobs/database/schema.sql.docker.sql index 48710b82..dd7fc924 100644 --- a/httpobs/database/schema.sql.docker.sql +++ b/httpobs/database/schema.sql.docker.sql @@ -1,4 +1,3 @@ /* silly alphabetical naming requirements */ ALTER ROLE httpobsapi LOGIN PASSWORD 'httpobsapipassword'; -ALTER ROLE httpobsscanner LOGIN PASSWORD 'httpobsscannerpassword'; diff --git a/httpobs/scanner/celeryconfig.py b/httpobs/scanner/celeryconfig.py deleted file mode 100644 index 81b1cc5c..00000000 --- a/httpobs/scanner/celeryconfig.py +++ /dev/null @@ -1,13 +0,0 @@ -from httpobs.conf import BROKER_URL as broker_url - -# Set the Celery task queue -broker_url = broker_url - -accept_content = ['json'] -task_ignore_resultS = True -worker_redirect_stdouts_level = 'WARNING' -result_serializer = 'json' -task_serializer = 'json' - -task_soft_time_limit = 751 -task_time_limit = 1129 diff --git a/httpobs/scanner/main.py b/httpobs/scanner/main.py deleted file mode 100644 index 00d9b23e..00000000 --- a/httpobs/scanner/main.py +++ /dev/null @@ -1,217 +0,0 @@ -import datetime -import subprocess -import sys -from random import randrange -from time import sleep -from urllib.parse import parse_qs, urlparse - -import psutil -import redis - -from httpobs.conf import ( - BROKER_URL, - SCANNER_ALLOW_KICKSTART, - SCANNER_ALLOW_KICKSTART_NUM_ABORTED, - SCANNER_BROKER_RECONNECTION_SLEEP_TIME, - SCANNER_CYCLE_SLEEP_TIME, - SCANNER_DATABASE_RECONNECTION_SLEEP_TIME, - SCANNER_MAINTENANCE_CYCLE_FREQUENCY, - SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY, - SCANNER_MAX_CPU_UTILIZATION, - SCANNER_MAX_LOAD, -) -from httpobs.database import periodic_maintenance, refresh_materialized_views, update_scans_dequeue_scans -from httpobs.scanner.tasks import scan - - -def main(): - # Start each scanner at a random point in the range to spread out database maintenance - dequeue_loop_count = randrange(0, SCANNER_MAINTENANCE_CYCLE_FREQUENCY) - materialized_view_loop_count = randrange(0, SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY) - - # Parse the BROKER_URL - broker_url = urlparse(BROKER_URL) - - if broker_url.scheme.lower() not in ('redis', 'redis+socket'): # Currently the de-queuer only support redis - print('Sorry, the scanner currently only supports redis.', file=sys.stderr) - sys.exit(1) - - # Get the current CPU utilization and wait a second to begin the loop for the next reading - psutil.cpu_percent() - sleep(1) - - while True: - try: - # TODO: Document this madness and magic numbers, make it configurable - # If max cpu is 90 and current CPU is 50, that gives us a headroom of 8 scans - headroom = int((SCANNER_MAX_CPU_UTILIZATION - psutil.cpu_percent()) / 5) - dequeue_quantity = min(headroom, SCANNER_MAX_LOAD) - - if headroom <= 0: - # If the cycle sleep time is .5, sleep 2 seconds at a minimum, 10 seconds at a maximum - sleep_time = min(max(abs(headroom), SCANNER_CYCLE_SLEEP_TIME * 4), 10) - print( - '[{time}] WARNING: Load too high. 
Sleeping for {num} second(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], num=sleep_time - ), - file=sys.stderr, - ) - - sleep(sleep_time) - continue - - except: - # I've noticed that on laptops that Docker has a tendency to kill the scanner when the laptop sleeps; this - # is designed to catch that exception - sleep(1) - continue - - # Every so many scans, let's opportunistically clear out any PENDING scans that are older than 1800 seconds - # Also update the grade_distribution table - # If it fails, we don't care. Of course, nobody reads the comments, so I should say that *I* don't care. - try: - if dequeue_loop_count % SCANNER_MAINTENANCE_CYCLE_FREQUENCY == 0: - print( - '[{time}] INFO: Performing periodic maintenance.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - - dequeue_loop_count = 0 - num = periodic_maintenance() - - if num > 0: - print( - '[{time}] INFO: Cleared {num} broken scan(s).'.format( - time=str(datetime.datetime.now()).split('.')[0], num=num - ), - file=sys.stderr, - ) - - # Forcibly restart if things are going real bad, sleep for a bit to avoid flagging - if num > SCANNER_ALLOW_KICKSTART_NUM_ABORTED and SCANNER_ALLOW_KICKSTART: - sleep(10) - try: - print( - '[{time}] ERROR: Celery appears to be hung. Attempting to kickstart the scanners.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - subprocess.call(['pkill', '-u', 'httpobs']) - except FileNotFoundError: - print( - '[{time}] ERROR: Tried to kickstart, but no pkill found.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - except: - print( - '[{time}] ERROR: Tried to kickstart, but failed for unknown reasons.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - except: - pass - finally: - dequeue_loop_count += 1 - num = 0 - - # Every so often we need to refresh the materialized views that the statistics depend on - try: - if materialized_view_loop_count % SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY == 0: - print( - '[{time}] INFO: Refreshing materialized views.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - - materialized_view_loop_count = 0 - refresh_materialized_views() - - print( - '[{time}] INFO: Materialized views refreshed.'.format( - time=str(datetime.datetime.now()).split('.')[0] - ), - file=sys.stderr, - ) - except: - pass - finally: - materialized_view_loop_count += 1 - - # Verify that the broker is still up; if it's down, let's sleep and try again later - try: - if broker_url.scheme.lower() == 'redis': - conn = redis.Connection( - host=broker_url.hostname, - port=broker_url.port or 6379, - db=int(broker_url.path[1:] if len(broker_url.path) > 0 else 0), - password=broker_url.password, - ) - else: - conn = redis.UnixDomainSocketConnection( - path=broker_url.path, db=int(parse_qs(broker_url.query).get('virtual_host', ['0'])[0]) - ) - - conn.connect() - conn.can_read() - conn.disconnect() - del conn - except: - print( - '[{time}] ERROR: Unable to connect to to redis. Sleeping for {num} seconds.'.format( - time=str(datetime.datetime.now()).split('.')[0], num=SCANNER_BROKER_RECONNECTION_SLEEP_TIME - ), - file=sys.stderr, - ) - sleep(SCANNER_BROKER_RECONNECTION_SLEEP_TIME) - continue - - # Get a list of sites that are pending - try: - sites_to_scan = update_scans_dequeue_scans(dequeue_quantity) - except IOError: - print( - '[{time}] ERROR: Unable to retrieve lists of sites to scan. 
Sleeping for {num} seconds.'.format( - time=str(datetime.datetime.now()).split('.')[0], num=SCANNER_DATABASE_RECONNECTION_SLEEP_TIME - ), - file=sys.stderr, - ) - sleep(SCANNER_DATABASE_RECONNECTION_SLEEP_TIME) - continue - - try: - if sites_to_scan: - print( - '[{time}] INFO: Dequeuing {num} site(s): {sites}.'.format( - time=str(datetime.datetime.now()).split('.')[0], - num=len(sites_to_scan), - sites=', '.join([site[0] for site in sites_to_scan]), - ), - file=sys.stderr, - ) - - for site in sites_to_scan: - scan.delay(*site) - - # Always sleep at least some amount of time so that CPU utilization measurements can track - sleep(SCANNER_CYCLE_SLEEP_TIME / 2) - else: # If the queue was empty, lets sleep a little bit - sleep(SCANNER_CYCLE_SLEEP_TIME) - except KeyboardInterrupt: - print('Exiting scanner backend') - sys.exit(1) - except: # this shouldn't trigger, but we don't want a scan breakage to kill the scanner - print( - '[{time}] ERROR: Unknown celery error.'.format(time=str(datetime.datetime.now()).split('.')[0]), - file=sys.stderr, - ) - - -if __name__ == '__main__': - main() diff --git a/httpobs/scanner/retriever/retriever.py b/httpobs/scanner/retriever/retriever.py index e5c2abd6..c8e9b131 100644 --- a/httpobs/scanner/retriever/retriever.py +++ b/httpobs/scanner/retriever/retriever.py @@ -2,7 +2,6 @@ from urllib.parse import urlparse import requests -from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded # Disable the requests InsecureRequestWarning -- we will track certificate errors manually when # verification is disabled. Also disable requests errors at levels lower than CRITICAL, see: @@ -60,9 +59,6 @@ def __create_session(url: str, **kwargs) -> dict: # No tls errors r.verified = True - # Let celery exceptions percolate upward - except (SoftTimeLimitExceeded, TimeLimitExceeded): - raise # We can try again if there's an SSL error, making sure to note it in the session except requests.exceptions.SSLError: try: @@ -103,9 +99,6 @@ def __get(session, relative_path='/', headers=None, cookies=None): cookies=cookies, timeout=TIMEOUT, ) - # Let celery exceptions percolate upward - except (SoftTimeLimitExceeded, TimeLimitExceeded): - raise except (KeyboardInterrupt, SystemExit): raise except: diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index efc6d68e..6f60e1a4 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -1,21 +1,13 @@ import sys -from celery import Celery -from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded, WorkerLostError, WorkerShutdown, WorkerTerminate - from httpobs.conf import DEVELOPMENT_MODE from httpobs.database import insert_test_results, select_site_headers, update_scan_state -from httpobs.scanner import STATE_ABORTED, STATE_FAILED, STATE_RUNNING, celeryconfig +from httpobs.scanner import STATE_FAILED, STATE_RUNNING from httpobs.scanner.analyzer import tests from httpobs.scanner.retriever import retrieve_all from httpobs.scanner.utils import sanitize_headers -# Create the scanner task queue -scanner = Celery() -scanner.config_from_object(celeryconfig) - -@scanner.task() def scan(hostname: str, site_id: int, scan_id: int): try: # Once celery kicks off the task, let's update the scan state from PENDING to RUNNING @@ -35,7 +27,7 @@ def scan(hostname: str, site_id: int, scan_id: int): # Execute each test, replacing the underscores in the function name with dashes in the test name # TODO: Get overridden expectations - insert_test_results( + return insert_test_results( site_id, scan_id, [test(reqs) 
for test in tests], @@ -43,11 +35,6 @@ def scan(hostname: str, site_id: int, scan_id: int): reqs['responses']['auto'].status_code, ) - # catch the celery timeout, which will almost certainly occur in retrieve_all() - except SoftTimeLimitExceeded: - update_scan_state(scan_id, STATE_ABORTED, error='site unresponsive') - except (TimeLimitExceeded, WorkerLostError, WorkerShutdown, WorkerTerminate): - raise # the database is down, oh no! except IOError: print('database down, aborting scan on {hostname}'.format(hostname=hostname), file=sys.stderr) diff --git a/httpobs/scripts/httpobs-scan-worker b/httpobs/scripts/httpobs-scan-worker deleted file mode 100755 index e9391149..00000000 --- a/httpobs/scripts/httpobs-scan-worker +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env sh - -if [ $(uname -s) = "Darwin" ]; then - ulimit -n 2048 # increase file descriptor limits on OS X -fi - -if [ -n "$HTTPOBS_MAX_CONCURRENCY" ]; then - CONCURRENCY=$HTTPOBS_MAX_CONCURRENCY - LOGLEVEL=warning -elif [ -n "$HTTPOBS_DEV" ]; then - CONCURRENCY=48 - LOGLEVEL=info -else - CONCURRENCY=96 - LOGLEVEL=warning -fi - -# Kill the existing celery workers -PID='/var/run/httpobs/scanner.pid' -if [ -f $PID ]; -then - kill `cat $PID` - rm -f $PID -fi - -# Execute celery -celery \ - -A httpobs.scanner.tasks \ - --broker=$HTTPOBS_BROKER_URL \ - worker \ - --autoscale=$CONCURRENCY,4 \ - --hostname='scanner@%h' \ - --logfile='/var/log/httpobs/scanner.log' \ - --loglevel=$LOGLEVEL \ - --max-tasks-per-child=16 \ - --pidfile='/var/run/httpobs/scanner.pid' \ - & - -# Run the scanner -python3 -u httpobs/scanner/main.py >> /var/log/httpobs/scan-worker.log 2>&1 diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 96c67457..0effe201 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -8,6 +8,7 @@ from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN from httpobs.scanner import STATES from httpobs.scanner.grader import GRADES, get_score_description +from httpobs.scanner.tasks import scan from httpobs.scanner.utils import valid_hostname from httpobs.website import add_response_headers, sanitized_api_response @@ -65,6 +66,7 @@ def api_post_scan_hostname(): # Begin the dispatch process if it was a POST if request.method == 'POST': row = database.insert_scan(site_id, hidden=hidden) + row = scan(hostname, site_id, row["id"]) else: return { 'error': 'recent-scan-not-found', diff --git a/poetry.lock b/poetry.lock index 945e3cc6..6a7319d2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,30 +1,5 @@ # This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. -[[package]] -name = "amqp" -version = "5.1.1" -description = "Low-level AMQP client for Python (fork of amqplib)." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, - {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, -] - -[package.dependencies] -vine = ">=5.0.0" - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - [[package]] name = "beautifulsoup4" version = "4.12.2" @@ -43,17 +18,6 @@ soupsieve = ">1.2" html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "billiard" -version = "4.1.0" -description = "Python multiprocessing fork with improvements and bugfixes" -optional = false -python-versions = ">=3.7" -files = [ - {file = "billiard-4.1.0-py3-none-any.whl", hash = "sha256:0f50d6be051c6b2b75bfbc8bfd85af195c5739c281d3f5b86a5640c65563614a"}, - {file = "billiard-4.1.0.tar.gz", hash = "sha256:1ad2eeae8e28053d729ba3373d34d9d6e210f6e4d8bf0a9c64f92bd053f1edf5"}, -] - [[package]] name = "black" version = "23.12.1" @@ -100,79 +64,24 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "blinker" -version = "1.6.2" +version = "1.7.0" description = "Fast, simple object-to-object and broadcast signaling" optional = false -python-versions = ">=3.7" -files = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, -] - -[[package]] -name = "celery" -version = "5.3.4" -description = "Distributed Task Queue." 
-optional = false python-versions = ">=3.8" files = [ - {file = "celery-5.3.4-py3-none-any.whl", hash = "sha256:1e6ed40af72695464ce98ca2c201ad0ef8fd192246f6c9eac8bba343b980ad34"}, - {file = "celery-5.3.4.tar.gz", hash = "sha256:9023df6a8962da79eb30c0c84d5f4863d9793a466354cc931d7f72423996de28"}, + {file = "blinker-1.7.0-py3-none-any.whl", hash = "sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9"}, + {file = "blinker-1.7.0.tar.gz", hash = "sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182"}, ] -[package.dependencies] -billiard = ">=4.1.0,<5.0" -click = ">=8.1.2,<9.0" -click-didyoumean = ">=0.3.0" -click-plugins = ">=1.1.1" -click-repl = ">=0.2.0" -kombu = ">=5.3.2,<6.0" -python-dateutil = ">=2.8.2" -tzdata = ">=2022.7" -vine = ">=5.0.0,<6.0" - -[package.extras] -arangodb = ["pyArango (>=2.0.2)"] -auth = ["cryptography (==41.0.3)"] -azureblockblob = ["azure-storage-blob (>=12.15.0)"] -brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] -cassandra = ["cassandra-driver (>=3.25.0,<4)"] -consul = ["python-consul2 (==0.1.5)"] -cosmosdbsql = ["pydocumentdb (==2.3.5)"] -couchbase = ["couchbase (>=3.0.0)"] -couchdb = ["pycouchdb (==1.14.2)"] -django = ["Django (>=2.2.28)"] -dynamodb = ["boto3 (>=1.26.143)"] -elasticsearch = ["elasticsearch (<8.0)"] -eventlet = ["eventlet (>=0.32.0)"] -gevent = ["gevent (>=1.5.0)"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -memcache = ["pylibmc (==1.6.3)"] -mongodb = ["pymongo[srv] (>=4.0.2)"] -msgpack = ["msgpack (==1.0.5)"] -pymemcache = ["python-memcached (==1.59)"] -pyro = ["pyro4 (==4.82)"] -pytest = ["pytest-celery (==0.0.0)"] -redis = ["redis (>=4.5.2,!=4.5.5,<5.0.0)"] -s3 = ["boto3 (>=1.26.143)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -solar = ["ephem (==4.1.4)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "kombu[sqs] (>=5.3.0)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=1.3.1)"] -zstd = ["zstandard (==0.21.0)"] - [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] @@ -188,101 +97,101 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.0" +version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.0.tar.gz", hash = "sha256:63563193aec44bce707e0c5ca64ff69fa72ed7cf34ce6e11d5127555756fd2f6"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:effe5406c9bd748a871dbcaf3ac69167c38d72db8c9baf3ff954c344f31c4cbe"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4162918ef3098851fcd8a628bf9b6a98d10c380725df9e04caf5ca6dd48c847a"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0570d21da019941634a531444364f2482e8db0b3425fcd5ac0c36565a64142c8"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5707a746c6083a3a74b46b3a631d78d129edab06195a92a8ece755aac25a3f3d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278c296c6f96fa686d74eb449ea1697f3c03dc28b75f873b65b5201806346a69"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a4b71f4d1765639372a3b32d2638197f5cd5221b19531f9245fcc9ee62d38f56"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5969baeaea61c97efa706b9b107dcba02784b1601c74ac84f2a532ea079403e"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f93dab657839dfa61025056606600a11d0b696d79386f974e459a3fbc568ec"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db756e48f9c5c607b5e33dd36b1d5872d0422e960145b08ab0ec7fd420e9d649"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:232ac332403e37e4a03d209a3f92ed9071f7d3dbda70e2a5e9cff1c4ba9f0678"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e5c1502d4ace69a179305abb3f0bb6141cbe4714bc9b31d427329a95acfc8bdd"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:2502dd2a736c879c0f0d3e2161e74d9907231e25d35794584b1ca5284e43f596"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23e8565ab7ff33218530bc817922fae827420f143479b753104ab801145b1d5b"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win32.whl", hash = "sha256:1872d01ac8c618a8da634e232f24793883d6e456a66593135aeafe3784b0848d"}, - {file = "charset_normalizer-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:557b21a44ceac6c6b9773bc65aa1b4cc3e248a5ad2f5b914b91579a32e22204d"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d7eff0f27edc5afa9e405f7165f85a6d782d308f3b6b9d96016c010597958e63"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6a685067d05e46641d5d1623d7c7fdf15a357546cbb2f71b0ebde91b175ffc3e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0d3d5b7db9ed8a2b11a774db2bbea7ba1884430a205dbd54a32d61d7c2a190fa"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2935ffc78db9645cb2086c2f8f4cfd23d9b73cc0dc80334bc30aac6f03f68f8c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fe359b2e3a7729010060fbca442ca225280c16e923b37db0e955ac2a2b72a05"}, - {file = 
"charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:380c4bde80bce25c6e4f77b19386f5ec9db230df9f2f2ac1e5ad7af2caa70459"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0d1e3732768fecb052d90d62b220af62ead5748ac51ef61e7b32c266cac9293"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b2919306936ac6efb3aed1fbf81039f7087ddadb3160882a57ee2ff74fd2382"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f8888e31e3a85943743f8fc15e71536bda1c81d5aa36d014a3c0c44481d7db6e"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:82eb849f085624f6a607538ee7b83a6d8126df6d2f7d3b319cb837b289123078"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7b8b8bf1189b3ba9b8de5c8db4d541b406611a71a955bbbd7385bbc45fcb786c"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5adf257bd58c1b8632046bbe43ee38c04e1038e9d37de9c57a94d6bd6ce5da34"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c350354efb159b8767a6244c166f66e67506e06c8924ed74669b2c70bc8735b1"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win32.whl", hash = "sha256:02af06682e3590ab952599fbadac535ede5d60d78848e555aa58d0c0abbde786"}, - {file = "charset_normalizer-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:86d1f65ac145e2c9ed71d8ffb1905e9bba3a91ae29ba55b4c46ae6fc31d7c0d4"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3b447982ad46348c02cb90d230b75ac34e9886273df3a93eec0539308a6296d7"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:abf0d9f45ea5fb95051c8bfe43cb40cda383772f7e5023a83cc481ca2604d74e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b09719a17a2301178fac4470d54b1680b18a5048b481cb8890e1ef820cb80455"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d9b48ee6e3967b7901c052b670c7dda6deb812c309439adaffdec55c6d7b78"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edfe077ab09442d4ef3c52cb1f9dab89bff02f4524afc0acf2d46be17dc479f5"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3debd1150027933210c2fc321527c2299118aa929c2f5a0a80ab6953e3bd1908"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86f63face3a527284f7bb8a9d4f78988e3c06823f7bea2bd6f0e0e9298ca0403"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24817cb02cbef7cd499f7c9a2735286b4782bd47a5b3516a0e84c50eab44b98e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c71f16da1ed8949774ef79f4a0260d28b83b3a50c6576f8f4f0288d109777989"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9cf3126b85822c4e53aa28c7ec9869b924d6fcfb76e77a45c44b83d91afd74f9"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b3b2316b25644b23b54a6f6401074cebcecd1244c0b8e80111c9a3f1c8e83d65"}, - {file = 
"charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:03680bb39035fbcffe828eae9c3f8afc0428c91d38e7d61aa992ef7a59fb120e"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cc152c5dd831641e995764f9f0b6589519f6f5123258ccaca8c6d34572fefa8"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win32.whl", hash = "sha256:b8f3307af845803fb0b060ab76cf6dd3a13adc15b6b451f54281d25911eb92df"}, - {file = "charset_normalizer-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:8eaf82f0eccd1505cf39a45a6bd0a8cf1c70dcfc30dba338207a969d91b965c0"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc45229747b67ffc441b3de2f3ae5e62877a282ea828a5bdb67883c4ee4a8810"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f4a0033ce9a76e391542c182f0d48d084855b5fcba5010f707c8e8c34663d77"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ada214c6fa40f8d800e575de6b91a40d0548139e5dc457d2ebb61470abf50186"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b1121de0e9d6e6ca08289583d7491e7fcb18a439305b34a30b20d8215922d43c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1063da2c85b95f2d1a430f1c33b55c9c17ffaf5e612e10aeaad641c55a9e2b9d"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70f1d09c0d7748b73290b29219e854b3207aea922f839437870d8cc2168e31cc"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:250c9eb0f4600361dd80d46112213dff2286231d92d3e52af1e5a6083d10cad9"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:750b446b2ffce1739e8578576092179160f6d26bd5e23eb1789c4d64d5af7dc7"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:fc52b79d83a3fe3a360902d3f5d79073a993597d48114c29485e9431092905d8"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:588245972aca710b5b68802c8cad9edaa98589b1b42ad2b53accd6910dad3545"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e39c7eb31e3f5b1f88caff88bcff1b7f8334975b46f6ac6e9fc725d829bc35d4"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win32.whl", hash = "sha256:abecce40dfebbfa6abf8e324e1860092eeca6f7375c8c4e655a8afb61af58f2c"}, - {file = "charset_normalizer-3.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a91a981f185721542a0b7c92e9054b7ab4fea0508a795846bc5b0abf8118d4"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:67b8cc9574bb518ec76dc8e705d4c39ae78bb96237cb533edac149352c1f39fe"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac71b2977fb90c35d41c9453116e283fac47bb9096ad917b8819ca8b943abecd"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3ae38d325b512f63f8da31f826e6cb6c367336f95e418137286ba362925c877e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:542da1178c1c6af8873e143910e2269add130a299c9106eef2594e15dae5e482"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:30a85aed0b864ac88309b7d94be09f6046c834ef60762a8833b660139cfbad13"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aae32c93e0f64469f74ccc730a7cb21c7610af3a775157e50bbd38f816536b38"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b26ddf78d57f1d143bdf32e820fd8935d36abe8a25eb9ec0b5a71c82eb3895"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f5d10bae5d78e4551b7be7a9b29643a95aded9d0f602aa2ba584f0388e7a557"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:249c6470a2b60935bafd1d1d13cd613f8cd8388d53461c67397ee6a0f5dce741"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c5a74c359b2d47d26cdbbc7845e9662d6b08a1e915eb015d044729e92e7050b7"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:b5bcf60a228acae568e9911f410f9d9e0d43197d030ae5799e20dca8df588287"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:187d18082694a29005ba2944c882344b6748d5be69e3a89bf3cc9d878e548d5a"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:81bf654678e575403736b85ba3a7867e31c2c30a69bc57fe88e3ace52fb17b89"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win32.whl", hash = "sha256:85a32721ddde63c9df9ebb0d2045b9691d9750cb139c161c80e500d210f5e26e"}, - {file = "charset_normalizer-3.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:468d2a840567b13a590e67dd276c570f8de00ed767ecc611994c301d0f8c014f"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e0fc42822278451bc13a2e8626cf2218ba570f27856b536e00cfa53099724828"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:09c77f964f351a7369cc343911e0df63e762e42bac24cd7d18525961c81754f4"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12ebea541c44fdc88ccb794a13fe861cc5e35d64ed689513a5c03d05b53b7c82"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:805dfea4ca10411a5296bcc75638017215a93ffb584c9e344731eef0dcfb026a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96c2b49eb6a72c0e4991d62406e365d87067ca14c1a729a870d22354e6f68115"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf7b34c5bc56b38c931a54f7952f1ff0ae77a2e82496583b247f7c969eb1479"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:619d1c96099be5823db34fe89e2582b336b5b074a7f47f819d6b3a57ff7bdb86"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0ac5e7015a5920cfce654c06618ec40c33e12801711da6b4258af59a8eff00a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:93aa7eef6ee71c629b51ef873991d6911b906d7312c6e8e99790c0f33c576f89"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7966951325782121e67c81299a031f4c115615e68046f79b85856b86ebffc4cd"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:02673e456dc5ab13659f85196c534dc596d4ef260e4d86e856c3b2773ce09843"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c2af80fb58f0f24b3f3adcb9148e6203fa67dd3f61c4af146ecad033024dde43"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:153e7b6e724761741e0974fc4dcd406d35ba70b92bfe3fedcb497226c93b9da7"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win32.whl", hash = "sha256:d47ecf253780c90ee181d4d871cd655a789da937454045b17b5798da9393901a"}, - {file = "charset_normalizer-3.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d97d85fa63f315a8bdaba2af9a6a686e0eceab77b3089af45133252618e70884"}, - {file = "charset_normalizer-3.3.0-py3-none-any.whl", hash = "sha256:e46cd37076971c1040fc8c41273a8b3e2c624ce4f2be3f5dfcb7a430c1d3acc2"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] @@ -299,55 +208,6 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} -[[package]] -name = "click-didyoumean" -version = "0.3.0" -description = "Enables git-like *did-you-mean* feature in click" -optional = false -python-versions = ">=3.6.2,<4.0.0" -files = [ - {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, - {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, -] - -[package.dependencies] -click = ">=7" - -[[package]] -name = "click-plugins" -version = "1.1.1" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
-optional = false -python-versions = "*" -files = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] - -[[package]] -name = "click-repl" -version = "0.3.0" -description = "REPL plugin for Click" -optional = false -python-versions = ">=3.6" -files = [ - {file = "click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9"}, - {file = "click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812"}, -] - -[package.dependencies] -click = ">=7.0" -prompt-toolkit = ">=3.0.36" - -[package.extras] -testing = ["pytest (>=7.2.1)", "pytest-cov (>=4.0.0)", "tox (>=4.4.3)"] - [[package]] name = "colorama" version = "0.4.6" @@ -361,63 +221,63 @@ files = [ [[package]] name = "coverage" -version = "7.3.2" +version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" files = [ - {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, - {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, - {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, - {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, - {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, - {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, - {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, - {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, - {file = 
"coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, - {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, - {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, - {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, - {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, - {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, - {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, - {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, - {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"}, - {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"}, - {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"}, - {file = 
"coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"}, - {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"}, - {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"}, - {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"}, - {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"}, - {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"}, - {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"}, - {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"}, - {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"}, - {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, - {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, + {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516"}, + {file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566"}, + {file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae"}, + {file = 
"coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43"}, + {file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451"}, + {file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca"}, + {file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc"}, + {file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09"}, + {file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26"}, + {file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614"}, + {file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143"}, + {file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446"}, + {file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a"}, + {file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa"}, + {file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450"}, + {file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e"}, + {file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1"}, + {file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e"}, + {file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105"}, + {file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2"}, + {file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42"}, + {file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed"}, + {file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058"}, + {file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f"}, + {file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932"}, + {file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e"}, + {file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6"}, + {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] [package.extras] @@ -488,19 +348,6 @@ Werkzeug = ">=3.0.0" async = ["asgiref (>=3.2)"] 
dotenv = ["python-dotenv"] -[[package]] -name = "httpobs-cli" -version = "1.0.2" -description = "HTTP Observatory: a command line tool to scan your website" -optional = false -python-versions = "*" -files = [ - {file = "httpobs-cli-1.0.2.tar.gz", hash = "sha256:6499b99b4298e9a217361bff7c8679954fd966891e02eb4bdbe0d4f95b02e67f"}, -] - -[package.dependencies] -requests = "*" - [[package]] name = "identify" version = "2.5.33" @@ -517,13 +364,13 @@ license = ["ukkonen"] [[package]] name = "idna" -version = "3.4" +version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, ] [[package]] @@ -568,38 +415,6 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] -[[package]] -name = "kombu" -version = "5.3.2" -description = "Messaging library for Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "kombu-5.3.2-py3-none-any.whl", hash = "sha256:b753c9cfc9b1e976e637a7cbc1a65d446a22e45546cd996ea28f932082b7dc9e"}, - {file = "kombu-5.3.2.tar.gz", hash = "sha256:0ba213f630a2cb2772728aef56ac6883dc3a2f13435e10048f6e97d48506dbbd"}, -] - -[package.dependencies] -amqp = ">=5.1.1,<6.0.0" -vine = "*" - -[package.extras] -azureservicebus = ["azure-servicebus (>=7.10.0)"] -azurestoragequeues = ["azure-identity (>=1.12.0)", "azure-storage-queue (>=12.6.0)"] -confluentkafka = ["confluent-kafka (==2.1.1)"] -consul = ["python-consul2"] -librabbitmq = ["librabbitmq (>=2.0.0)"] -mongodb = ["pymongo (>=4.1.1)"] -msgpack = ["msgpack"] -pyro = ["pyro4"] -qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] -redis = ["redis (>=4.5.2)"] -slmq = ["softlayer-messaging (>=1.0.3)"] -sqlalchemy = ["sqlalchemy (>=1.4.48,<2.1)"] -sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] -yaml = ["PyYAML (>=3.10)"] -zookeeper = ["kazoo (>=2.8.0)"] - [[package]] name = "markupsafe" version = "2.1.3" @@ -771,46 +586,6 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" -[[package]] -name = "prompt-toolkit" -version = "3.0.39" -description = "Library for building powerful interactive command lines in Python" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "prompt_toolkit-3.0.39-py3-none-any.whl", hash = "sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"}, - {file = "prompt_toolkit-3.0.39.tar.gz", hash = "sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac"}, -] - -[package.dependencies] -wcwidth = "*" - -[[package]] -name = "psutil" -version = "5.9.5" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:be8929ce4313f9f8146caad4272f6abb8bf99fc6cf59344a3167ecd74f4f203f"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab8ed1a1d77c95453db1ae00a3f9c50227ebd955437bcf2a574ba8adbf6a74d5"}, - {file = "psutil-5.9.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4aef137f3345082a3d3232187aeb4ac4ef959ba3d7c10c33dd73763fbc063da4"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ea8518d152174e1249c4f2a1c89e3e6065941df2fa13a1ab45327716a23c2b48"}, - {file = "psutil-5.9.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:acf2aef9391710afded549ff602b5887d7a2349831ae4c26be7c807c0a39fac4"}, - {file = "psutil-5.9.5-cp27-none-win32.whl", hash = "sha256:5b9b8cb93f507e8dbaf22af6a2fd0ccbe8244bf30b1baad6b3954e935157ae3f"}, - {file = "psutil-5.9.5-cp27-none-win_amd64.whl", hash = "sha256:8c5f7c5a052d1d567db4ddd231a9d27a74e8e4a9c3f44b1032762bd7b9fdcd42"}, - {file = "psutil-5.9.5-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:3c6f686f4225553615612f6d9bc21f1c0e305f75d7d8454f9b46e901778e7217"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7a7dd9997128a0d928ed4fb2c2d57e5102bb6089027939f3b722f3a210f9a8da"}, - {file = "psutil-5.9.5-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89518112647f1276b03ca97b65cc7f64ca587b1eb0278383017c2a0dcc26cbe4"}, - {file = "psutil-5.9.5-cp36-abi3-win32.whl", hash = "sha256:104a5cc0e31baa2bcf67900be36acde157756b9c44017b86b2c049f11957887d"}, - {file = "psutil-5.9.5-cp36-abi3-win_amd64.whl", hash = "sha256:b258c0c1c9d145a1d5ceffab1134441c4c5113b2417fafff7315a917a026c3c9"}, - {file = "psutil-5.9.5-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:c607bb3b57dc779d55e1554846352b4e358c10fff3abf3514a7a6601beebdb30"}, - {file = "psutil-5.9.5.tar.gz", hash = "sha256:5410638e4df39c54d957fc51ce03048acd8e6d60abc0f5107af51e5fb566eb3c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - [[package]] name = "psycopg2" version = "2.9.9" @@ -833,13 +608,13 @@ files = [ [[package]] name = "publicsuffixlist" -version = "0.10.0.20231002" +version = "0.10.0.20231214" description = "publicsuffixlist implement" optional = false python-versions = ">=2.6" files = [ - {file = "publicsuffixlist-0.10.0.20231002-py2.py3-none-any.whl", hash = "sha256:81990427ec5dbdc8f2620c1775d5bc47ba54fe44b4e64797d06040d708d67171"}, - {file = "publicsuffixlist-0.10.0.20231002.tar.gz", hash = "sha256:a8ef3f5745196fd956bcf6f425b5000450896c616ee6e95130e147e2fae10ccc"}, + {file = "publicsuffixlist-0.10.0.20231214-py2.py3-none-any.whl", hash = "sha256:10e227902e3b2acefb604b5de8a8a7d3df237f2885f06762d47fdbc9e0528b67"}, + {file = "publicsuffixlist-0.10.0.20231214.tar.gz", hash = "sha256:76a2ed46814f091ea867fb40a6c20c142a437af7aae7ac8eb425ddc464bcb8e1"}, ] [package.extras] @@ -848,13 +623,13 @@ update = ["requests"] [[package]] name = "pycodestyle" -version = "2.11.0" +version = "2.11.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = 
"sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] @@ -879,20 +654,6 @@ files = [ {file = "pynose-1.4.8.tar.gz", hash = "sha256:c8c1d500f5b64693432520438124c0fd016bfe30826cc3d34848e83e11dd0d02"}, ] -[[package]] -name = "python-dateutil" -version = "2.8.2" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, -] - -[package.dependencies] -six = ">=1.5" - [[package]] name = "pyyaml" version = "6.0.1" @@ -952,24 +713,6 @@ files = [ {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] -[[package]] -name = "redis" -version = "5.0.1" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.7" -files = [ - {file = "redis-5.0.1-py3-none-any.whl", hash = "sha256:ed4802971884ae19d640775ba3b03aa2e7bd5e8fb8dfaed2decce4d0fc48391f"}, - {file = "redis-5.0.1.tar.gz", hash = "sha256:0dab495cd5753069d3bc650a0dde8a8f9edde16fc5691b689a566eda58100d0f"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.2", markers = "python_full_version <= \"3.11.2\""} - -[package.extras] -hiredis = ["hiredis (>=1.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==20.0.1)", "requests (>=2.26.0)"] - [[package]] name = "requests" version = "2.31.0" @@ -1007,17 +750,6 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - [[package]] name = "soupsieve" version = "2.5" @@ -1029,53 +761,30 @@ files = [ {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, ] -[[package]] -name = "tzdata" -version = "2023.3" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = 
"sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, -] - [[package]] name = "urllib3" -version = "2.0.6" +version = "2.1.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"}, - {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"}, + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "uwsgi" -version = "2.0.22" +version = "2.0.23" description = "The uWSGI server" optional = false python-versions = "*" files = [ - {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"}, -] - -[[package]] -name = "vine" -version = "5.0.0" -description = "Promises, promises, promises." -optional = false -python-versions = ">=3.6" -files = [ - {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, - {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, + {file = "uwsgi-2.0.23.tar.gz", hash = "sha256:0cafda0c16f921db7fe42cfaf81b167cf884ee17350efbdd87d1ecece2d7de37"}, ] [[package]] @@ -1098,26 +807,15 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] -[[package]] -name = "wcwidth" -version = "0.2.8" -description = "Measures the displayed width of unicode strings in a terminal" -optional = false -python-versions = "*" -files = [ - {file = "wcwidth-0.2.8-py2.py3-none-any.whl", hash = "sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704"}, - {file = "wcwidth-0.2.8.tar.gz", hash = "sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"}, -] - [[package]] name = "werkzeug" -version = "3.0.0" +version = "3.0.1" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, - {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, + {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, + {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, ] [package.dependencies] @@ -1129,4 +827,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "ecf08c0bda33050ffd997762e7100b24982c93006fb1f94f703be7f372bb96c4" +content-hash = "08b448fa537b520f27a5a642b51b5562e1eb126604da8f7d4bccd4459afa4fe7" diff --git a/pyproject.toml b/pyproject.toml index 6150e701..02f23edd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,15 +9,9 @@ maintainers = ["Leo McArdle "] [tool.poetry.dependencies] python = "^3.11" beautifulsoup4 = "^4.12.2" -celery = "^5.3.4" -coverage = "^7.3.2" flake8 = "^6.1.0" -httpobs-cli = "^1.0.2" -pynose = "^1.4.8" pep8 = "^1.7.1" psycopg2 = "^2.9.9" -redis = "^5.0.1" -psutil = "^5.9.5" publicsuffixlist = "^0.10.0.20231002" requests = "^2.31.0" Flask = "^3.0.0" @@ -26,6 +20,11 @@ pre-commit = "^3.6.0" black = "^23.12.1" isort = "^5.13.2" +[tool.poetry.group.test.dependencies] +coverage = "^7.4.0" +pynose = "^1.4.8" +urllib3 = "^2.1.0" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" From 4dd603489333c997460f86eefc317b01545d9ca9 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 14:00:32 +0000 Subject: [PATCH 15/31] move valid_hostname func into website module --- httpobs/conf/__init__.py | 3 -- httpobs/conf/httpobs.conf | 1 - httpobs/scanner/local.py | 3 -- httpobs/scanner/utils.py | 43 +------------------ .../tests/unittests/test_valid_hostname.py | 7 +-- httpobs/website/api.py | 2 +- httpobs/website/utils.py | 41 ++++++++++++++++++ 7 files changed, 44 insertions(+), 56 deletions(-) create mode 100644 httpobs/website/utils.py diff --git a/httpobs/conf/__init__.py b/httpobs/conf/__init__.py index 80a02e6c..bf490ca3 100644 --- a/httpobs/conf/__init__.py +++ b/httpobs/conf/__init__.py @@ -83,9 +83,6 @@ def __conf(section, param, type=None, default=None): SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int( environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or __conf('scanner', 'allow_kickstart_num_aborted') ) -SCANNER_ALLOW_LOCALHOST = environ.get('HTTPOBS_SCANNER_ALLOW_LOCALHOST') == 'yes' or __conf( - 'scanner', 'allow_localhost', bool -) SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float( environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or __conf('scanner', 'broker_reconnection_sleep_time') ) diff --git a/httpobs/conf/httpobs.conf b/httpobs/conf/httpobs.conf index b80654cf..ec8bbe26 100644 --- a/httpobs/conf/httpobs.conf +++ b/httpobs/conf/httpobs.conf @@ -27,7 +27,6 @@ user_agent = Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:98.0) Gecko/201001 abort_scan_time = 1800 allow_kickstart = no allow_kickstart_num_aborted = 5 -allow_localhost = no broker = redis://localhost:6379/0 broker_reconnection_sleep_time = 15 cycle_sleep_time = .5 diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index 18dcc4c3..73a53c96 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -1,4 +1,3 @@ -import httpobs.conf from httpobs.scanner.analyzer import NUM_TESTS, 
tests from httpobs.scanner.grader import get_grade_and_likelihood_for_score, get_score_description from httpobs.scanner.retriever import retrieve_all @@ -42,8 +41,6 @@ def scan(hostname, **kwargs): } } """ - # Always allow localhost scans when run in this way - httpobs.conf.SCANNER_ALLOW_LOCALHOST = True # Attempt to retrieve all the resources, not capturing exceptions reqs = retrieve_all(hostname, **kwargs) diff --git a/httpobs/scanner/utils.py b/httpobs/scanner/utils.py index dcbea95d..771c20ae 100644 --- a/httpobs/scanner/utils.py +++ b/httpobs/scanner/utils.py @@ -1,13 +1,12 @@ import json import os.path -import socket import sys import requests from bs4 import BeautifulSoup as bs from requests.structures import CaseInsensitiveDict -from httpobs.conf import SCANNER_ALLOW_LOCALHOST, SCANNER_PINNED_DOMAINS +from httpobs.conf import SCANNER_PINNED_DOMAINS HSTS_URL = 'https://raw.githubusercontent.com/chromium/chromium/main/net/http/transport_security_state_static.json' @@ -101,46 +100,6 @@ def sanitize_headers(headers: dict) -> dict: return None -def valid_hostname(hostname: str): - """ - :param hostname: The hostname requested in the scan - :return: Hostname if it's valid, None if it's an IP address, otherwise False - """ - - # Block attempts to scan things like 'localhost' if not allowed - if ('.' not in hostname or 'localhost' in hostname) and not SCANNER_ALLOW_LOCALHOST: - return False - - # First, let's try to see if it's an IPv4 address - try: - socket.inet_aton(hostname) # inet_aton() will throw an exception if hostname is not a valid IP address - return None # If we get this far, it's an IP address and therefore not a valid fqdn - except: - pass - - # And IPv6 - try: - socket.inet_pton(socket.AF_INET6, hostname) # same as inet_aton(), but for IPv6 - return None - except: - pass - - # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time - # that the validator is making a network connection -- the same that requests would make. - try: - hostname_ips = socket.getaddrinfo(hostname, 443) - - # This shouldn't trigger, since getaddrinfo should generate saierror if there's no A records. Nevertheless, - # I want to be careful in case of edge cases. This does make it hard to test. - if len(hostname_ips) < 1: - return False - except: - return False - - # If we've made it this far, then everything is good to go! Woohoo! 
-    return hostname
-
-
 # allow for this file to be run directly to fetch the HSTS preload list via the debugger
 # or via the regen script
 if __name__ == "__main__":
diff --git a/httpobs/tests/unittests/test_valid_hostname.py b/httpobs/tests/unittests/test_valid_hostname.py
index 82f129e0..ba341df4 100644
--- a/httpobs/tests/unittests/test_valid_hostname.py
+++ b/httpobs/tests/unittests/test_valid_hostname.py
@@ -1,7 +1,6 @@
 from unittest import TestCase
-from unittest.mock import patch

-from httpobs.scanner.utils import valid_hostname
+from httpobs.website.utils import valid_hostname


 class TestValidHostname(TestCase):
@@ -18,7 +17,3 @@ def test_invalid_hostname(self):
         self.assertFalse(valid_hostname('_spf.google.com'))  # no A records
         self.assertFalse(valid_hostname('127.0.0.1'))
         self.assertFalse(valid_hostname('2607:f8b0:4009:80b::200e'))
-
-    @patch('httpobs.scanner.utils.SCANNER_ALLOW_LOCALHOST', 'yes')
-    def test_valid_localhost(self):
-        self.assertTrue(valid_hostname('localhost'))
diff --git a/httpobs/website/api.py b/httpobs/website/api.py
index 0effe201..86d0d532 100644
--- a/httpobs/website/api.py
+++ b/httpobs/website/api.py
@@ -9,8 +9,8 @@
 from httpobs.scanner import STATES
 from httpobs.scanner.grader import GRADES, get_score_description
 from httpobs.scanner.tasks import scan
-from httpobs.scanner.utils import valid_hostname
 from httpobs.website import add_response_headers, sanitized_api_response
+from httpobs.website.utils import valid_hostname

 api = Blueprint('api', __name__)

diff --git a/httpobs/website/utils.py b/httpobs/website/utils.py
new file mode 100644
index 00000000..65fac948
--- /dev/null
+++ b/httpobs/website/utils.py
@@ -0,0 +1,41 @@
+import socket
+
+
+def valid_hostname(hostname: str):
+    """
+    :param hostname: The hostname requested in the scan
+    :return: Hostname if it's valid, None if it's an IP address, otherwise False
+    """
+
+    # Block attempts to scan things like 'localhost'
+    if '.' not in hostname or 'localhost' in hostname:
+        return False
+
+    # First, let's try to see if it's an IPv4 address
+    try:
+        socket.inet_aton(hostname)  # inet_aton() will throw an exception if hostname is not a valid IP address
+        return None  # If we get this far, it's an IP address and therefore not a valid fqdn
+    except:
+        pass
+
+    # And IPv6
+    try:
+        socket.inet_pton(socket.AF_INET6, hostname)  # same as inet_aton(), but for IPv6
+        return None
+    except:
+        pass
+
+    # Then, try to do a lookup on the hostname; this should return at least one entry and should be the first time
+    # that the validator is making a network connection -- the same that requests would make.
+    try:
+        hostname_ips = socket.getaddrinfo(hostname, 443)
+
+        # This shouldn't trigger, since getaddrinfo should raise gaierror if there are no A records. Nevertheless,
+        # I want to be careful in case of edge cases. This does make it hard to test.
+        if len(hostname_ips) < 1:
+            return False
+    except:
+        return False
+
+    # If we've made it this far, then everything is good to go! Woohoo!
+ return hostname From 3d6096f5ee4f5886c34289bf8575b83549c04483 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 16:43:27 +0000 Subject: [PATCH 16/31] remove scanning logic from database.insert_test_results --- httpobs/database/database.py | 35 ++++++++------------------- httpobs/scanner/tasks.py | 47 +++++++++++++++++++++++++++++++----- 2 files changed, 51 insertions(+), 31 deletions(-) diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 1fd63565..56be2edc 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -28,7 +28,6 @@ STATE_STARTING, ) from httpobs.scanner.analyzer import NUM_TESTS -from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, get_grade_and_likelihood_for_score class SimpleDatabaseConnection: @@ -129,32 +128,15 @@ def insert_scan_grade(scan_id, scan_grade, scan_score) -> dict: return dict(cur.fetchone()) -# TODO: Separate out some of this logic so it doesn't need to be duplicated in local.scan() -def insert_test_results( - site_id: int, scan_id: int, tests: list, response_headers: dict, status_code: int = None -) -> dict: +def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict: with get_cursor() as cur: - tests_failed = tests_passed = 0 - score_with_extra_credit = uncurved_score = 100 - - for test in tests: + for test in data["tests"]: name = test.pop('name') expectation = test.pop('expectation') passed = test.pop('pass') result = test.pop('result') score_modifier = test.pop('score_modifier') - # Keep track of how many tests passed or failed - if passed: - tests_passed += 1 - else: - tests_failed += 1 - - # And keep track of the score - score_with_extra_credit += score_modifier - if score_modifier < 0: - uncurved_score += score_modifier - # Insert test result to the database cur.execute( """INSERT INTO tests (site_id, scan_id, name, expectation, result, pass, output, score_modifier) @@ -162,11 +144,14 @@ def insert_test_results( (site_id, scan_id, name, expectation, result, passed, dumps(test), score_modifier), ) - # Only record the full score if the uncurved score already receives an A - score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score - - # Now we need to update the scans table - score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score) + scan = data["scan"] + tests_failed = scan["tests_failed"] + tests_passed = scan["tests_passed"] + grade = scan["grade"] + score = scan["score"] + likelihood_indicator = scan["likelihood_indicator"] + response_headers = scan["response_headers"] + status_code = scan["status_code"] # Update the scans table cur.execute( diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index 6f60e1a4..6e0c46ac 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -3,7 +3,8 @@ from httpobs.conf import DEVELOPMENT_MODE from httpobs.database import insert_test_results, select_site_headers, update_scan_state from httpobs.scanner import STATE_FAILED, STATE_RUNNING -from httpobs.scanner.analyzer import tests +from httpobs.scanner.analyzer import NUM_TESTS, tests +from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, get_grade_and_likelihood_for_score from httpobs.scanner.retriever import retrieve_all from httpobs.scanner.utils import sanitize_headers @@ -25,14 +26,48 @@ def scan(hostname: str, site_id: int, scan_id: int): return - # Execute each test, replacing the underscores in the function name with dashes in the test name - # TODO: Get 
overridden expectations + results = [test(reqs) for test in tests] + response_headers = sanitize_headers(reqs["responses"]["auto"].headers) + status_code = reqs["responses"]["auto"].status_code + + tests_passed = 0 + score_with_extra_credit = uncurved_score = 100 + + for result in results: + passed = result.get("pass") + score_modifier = result.get("score_modifier") + + # Keep track of how many tests passed or failed + if passed: + tests_passed += 1 + + # And keep track of the score + score_with_extra_credit += score_modifier + if score_modifier < 0: + uncurved_score += score_modifier + + # Only record the full score if the uncurved score already receives an A + score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score + + # Now we need to update the scans table + score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score) + return insert_test_results( site_id, scan_id, - [test(reqs) for test in tests], - sanitize_headers(reqs['responses']['auto'].headers), - reqs['responses']['auto'].status_code, + { + "scan": { + "grade": grade, + "likelihood_indicator": likelihood_indicator, + "response_headers": response_headers, + "score": score, + "tests_failed": NUM_TESTS - tests_passed, + "tests_passed": tests_passed, + "tests_quantity": NUM_TESTS, + "status_code": status_code, + }, + "tests": results, + }, ) # the database is down, oh no! From 11098e9548ce7a5de7f04d31fb0461ffd2184d44 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 17:02:50 +0000 Subject: [PATCH 17/31] remove database calls from scanner.tasks.scan --- httpobs/scanner/tasks.py | 129 ++++++++++++++------------------------- httpobs/website/api.py | 35 ++++++++++- 2 files changed, 79 insertions(+), 85 deletions(-) diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index 6e0c46ac..350708d0 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -1,88 +1,53 @@ -import sys - -from httpobs.conf import DEVELOPMENT_MODE -from httpobs.database import insert_test_results, select_site_headers, update_scan_state -from httpobs.scanner import STATE_FAILED, STATE_RUNNING from httpobs.scanner.analyzer import NUM_TESTS, tests from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, get_grade_and_likelihood_for_score from httpobs.scanner.retriever import retrieve_all from httpobs.scanner.utils import sanitize_headers -def scan(hostname: str, site_id: int, scan_id: int): - try: - # Once celery kicks off the task, let's update the scan state from PENDING to RUNNING - update_scan_state(scan_id, STATE_RUNNING) - - # Get the site's cookies and headers - headers = select_site_headers(hostname) - - # Attempt to retrieve all the resources - reqs = retrieve_all(hostname, cookies=headers['cookies'], headers=headers['headers']) - - # If we can't connect at all, let's abort the test - if reqs['responses']['auto'] is None: - update_scan_state(scan_id, STATE_FAILED, error='site down') - - return - - results = [test(reqs) for test in tests] - response_headers = sanitize_headers(reqs["responses"]["auto"].headers) - status_code = reqs["responses"]["auto"].status_code - - tests_passed = 0 - score_with_extra_credit = uncurved_score = 100 - - for result in results: - passed = result.get("pass") - score_modifier = result.get("score_modifier") - - # Keep track of how many tests passed or failed - if passed: - tests_passed += 1 - - # And keep track of the score - score_with_extra_credit += score_modifier - if score_modifier < 0: - uncurved_score += 
score_modifier - - # Only record the full score if the uncurved score already receives an A - score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score - - # Now we need to update the scans table - score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score) - - return insert_test_results( - site_id, - scan_id, - { - "scan": { - "grade": grade, - "likelihood_indicator": likelihood_indicator, - "response_headers": response_headers, - "score": score, - "tests_failed": NUM_TESTS - tests_passed, - "tests_passed": tests_passed, - "tests_quantity": NUM_TESTS, - "status_code": status_code, - }, - "tests": results, - }, - ) - - # the database is down, oh no! - except IOError: - print('database down, aborting scan on {hostname}'.format(hostname=hostname), file=sys.stderr) - except: - # TODO: have more specific error messages - e = sys.exc_info()[1] # get the error message - - # If we are unsuccessful, close out the scan in the database - update_scan_state(scan_id, STATE_FAILED, error=repr(e)) - - # Print the exception to stderr if we're in dev - if DEVELOPMENT_MODE: - import traceback - - print('Error detected in scan for : ' + hostname) - traceback.print_exc(file=sys.stderr) +def scan(hostname: str, site_id: int, scan_id: int, headers: dict): + # Attempt to retrieve all the resources + reqs = retrieve_all(hostname, cookies=headers['cookies'], headers=headers['headers']) + + # If we can't connect at all, let's abort the test + if reqs['responses']['auto'] is None: + return + + results = [test(reqs) for test in tests] + response_headers = sanitize_headers(reqs["responses"]["auto"].headers) + status_code = reqs["responses"]["auto"].status_code + + tests_passed = 0 + score_with_extra_credit = uncurved_score = 100 + + for result in results: + passed = result.get("pass") + score_modifier = result.get("score_modifier") + + # Keep track of how many tests passed or failed + if passed: + tests_passed += 1 + + # And keep track of the score + score_with_extra_credit += score_modifier + if score_modifier < 0: + uncurved_score += score_modifier + + # Only record the full score if the uncurved score already receives an A + score = score_with_extra_credit if uncurved_score >= MINIMUM_SCORE_FOR_EXTRA_CREDIT else uncurved_score + + # Now we need to update the scans table + score, grade, likelihood_indicator = get_grade_and_likelihood_for_score(score) + + return { + "scan": { + "grade": grade, + "likelihood_indicator": likelihood_indicator, + "response_headers": response_headers, + "score": score, + "tests_failed": NUM_TESTS - tests_passed, + "tests_passed": tests_passed, + "tests_quantity": NUM_TESTS, + "status_code": status_code, + }, + "tests": results, + } diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 86d0d532..98fded60 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -1,12 +1,13 @@ import json import os.path +import sys from flask import Blueprint, jsonify, make_response, request from werkzeug.http import http_date import httpobs.database as database -from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN -from httpobs.scanner import STATES +from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN, DEVELOPMENT_MODE +from httpobs.scanner import STATE_FAILED, STATE_RUNNING, STATES from httpobs.scanner.grader import GRADES, get_score_description from httpobs.scanner.tasks import scan from httpobs.website import add_response_headers, sanitized_api_response @@ -66,7 +67,35 @@ 
def api_post_scan_hostname():
     # Begin the dispatch process if it was a POST
     if request.method == 'POST':
         row = database.insert_scan(site_id, hidden=hidden)
-        row = scan(hostname, site_id, row["id"])
+        scan_id = row["id"]
+
+        # The scan now runs inline rather than via celery, so update the scan state from PENDING to RUNNING
+        database.update_scan_state(scan_id, STATE_RUNNING)
+
+        # Get the site's cookies and headers
+        headers = database.select_site_headers(hostname)
+
+        try:
+            result = scan(hostname, site_id, scan_id, headers)
+
+            if result is None:
+                row = database.update_scan_state(scan_id, STATE_FAILED, error="site down")
+            else:
+                row = database.insert_test_results(
+                    site_id,
+                    scan_id,
+                    result,
+                )
+        except Exception as e:
+            # If we are unsuccessful, close out the scan in the database
+            row = database.update_scan_state(scan_id, STATE_FAILED, error=repr(e))
+
+            # Print the exception to stderr if we're in dev
+            if DEVELOPMENT_MODE:
+                import traceback
+
+                print("Error detected in scan for : " + hostname)
+                traceback.print_exc(file=sys.stderr)
     else:
         return {
             'error': 'recent-scan-not-found',

From d733972dc465a4d1dfab361ab1e95a8ceda4c77f Mon Sep 17 00:00:00 2001
From: Leo McArdle
Date: Wed, 3 Jan 2024 13:35:09 +0000
Subject: [PATCH 18/31] remove unused files

---
 httpobs/scripts/httpobs-mass-scan       | 79 ------------------------
 httpobs/tests/unittests/test_environ.py | 55 -----------------
 2 files changed, 134 deletions(-)
 delete mode 100755 httpobs/scripts/httpobs-mass-scan
 delete mode 100644 httpobs/tests/unittests/test_environ.py

diff --git a/httpobs/scripts/httpobs-mass-scan b/httpobs/scripts/httpobs-mass-scan
deleted file mode 100755
index 49784362..00000000
--- a/httpobs/scripts/httpobs-mass-scan
+++ /dev/null
@@ -1,79 +0,0 @@
-#!/usr/bin/env python3
-
-from __future__ import print_function
-
-import os
-import sys
-import time
-
-import grequests
-import requests
-
-from httpobs.conf import API_URL
-
-if 'HTTPOBS_DEV' in os.environ:  # TODO: use httpobs.conf
-    MAX_QUEUE = 64
-else:
-    MAX_QUEUE = 256
-
-if __name__ == '__main__':
-    if len(sys.argv) < 2:
-        print('Usage: ' + sys.argv[0] + ' ')
-        sys.exit(1)
-
-    start_time = time.time()
-    total_scanned = 0
-
-    s = requests.Session()
-
-    try:
-        with open(sys.argv[1], 'r') as alexafp:
-            hosts = [host.strip().split(',')[1] if ',' in host else host.strip() for host in alexafp]
-    except:
-        print('Cannot open ' + sys.argv[1])
-        sys.exit(1)
-
-    while True:
-        loop_time = time.time()
-
-        # Get the queue availability
-        try:
-            r = s.get(API_URL + '/__stats__?verbose=true').json()['states']
-        except:
-            time.sleep(5)
-            continue
-
-        available = MAX_QUEUE - r.get('PENDING', 0) - r.get('RUNNING', 0) - r.get('STARTING', 0)
-
-        print(
-            'Queue availability: {queue_avail}. Total scanned: {total_scanned}.
Pending: {pending}.'.format( - queue_avail=available, total_scanned=total_scanned, pending=r.get('PENDING', 0) - ) - ) - - # Quit if the scanner reports that nothing is pending - if not hosts and r.get('PENDING', 0) == 0: - break - - if available > 0: - targets = hosts[:available] - urls = [API_URL + '/analyze?host=' + host for host in targets] - total_scanned += available - - # Start up a new mass scan - try: - rs = (grequests.post(u) for u in urls) - grequests.map(rs) - except: - time.sleep(5) - raise - - hosts = hosts[available:] - - # If the previous loop completely quickly, cooldown a moment - if time.time() - loop_time < 5: - time.sleep(5) - - total_time = int(time.time() - start_time) - print('Elapsed time: {elapsed_time}s'.format(elapsed_time=total_time)) - print('Scans/sec: {speed}'.format(speed=total_scanned / total_time)) diff --git a/httpobs/tests/unittests/test_environ.py b/httpobs/tests/unittests/test_environ.py deleted file mode 100644 index 17b8ee37..00000000 --- a/httpobs/tests/unittests/test_environ.py +++ /dev/null @@ -1,55 +0,0 @@ -# TODO: Revisit the SystemExit things when we have time -# from os import environ -# from unittest import TestCase -# -# -# class TestEnvironmentalVariables(TestCase): -# def test_no_broker_url(self): -# def __import_scanner_celeryconfig_no_broker_url(): -# import httpobs.scanner.celeryconfig -# if httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS: -# pass -# -# def __import_database_celeryconfig_no_broker_url(): -# import httpobs.scanner.celeryconfig -# if httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS: -# pass -# -# if 'BROKER_URL' in environ: -# BROKER_URL = environ['BROKER_URL'] -# del environ['BROKER_URL'] -# else: -# BROKER_URL = None -# -# self.assertRaises(SystemExit, __import_database_celeryconfig_no_broker_url) -# self.assertRaises(SystemExit, __import_scanner_celeryconfig_no_broker_url) -# -# if BROKER_URL: -# environ['BROKER_URL'] = BROKER_URL -# -# # Mock this -# # def test_broker_url(self): -# # environ['BROKER_URL'] = 'foo' -# # -# # import httpobs.database.celeryconfig -# # import httpobs.scanner.celeryconfig -# # -# # self.assertTrue(httpobs.database.celeryconfig.CELERY_IGNORE_RESULTS) -# # self.assertTrue(httpobs.scanner.celeryconfig.CELERY_IGNORE_RESULTS) -# -# def test_no_database_url(self): -# def __import_database_no_database_url(): -# import httpobs.database.database -# if httpobs.database.database.conn: -# pass -# -# if 'HTTPOBS_DATABASE_URL' in environ: -# HTTPOBS_DATABASE_URL = environ['HTTPOBS_DATABASE_URL'] -# del environ['HTTPOBS_DATABASE_URL'] -# else: -# HTTPOBS_DATABASE_URL = None -# -# self.assertRaises(SystemExit, __import_database_no_database_url) -# -# if HTTPOBS_DATABASE_URL: -# environ['HTTPOBS_DATABASE_URL'] = HTTPOBS_DATABASE_URL From af3aa27ca140517f0b394a6270bb4eaa61b9d685 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Wed, 3 Jan 2024 13:22:41 +0000 Subject: [PATCH 19/31] remove unused config variables --- .github/workflows/test.yml | 1 - httpobs/conf/__init__.py | 35 +---------------------------------- httpobs/conf/httpobs.conf | 10 ---------- 3 files changed, 1 insertion(+), 45 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f183da51..edfa90f7 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -18,7 +18,6 @@ jobs: matrix: python-version: [ '3.11' ] env: - HTTPOBS_BROKER_URL: fakebrokerurl HTTPOBS_DATABASE_HOST: fakehost HTTPOBS_DATABASE_PASS: foo HTTPOBS_DATABASE_USER: bar diff --git a/httpobs/conf/__init__.py 
b/httpobs/conf/__init__.py index bf490ca3..40d03c1f 100644 --- a/httpobs/conf/__init__.py +++ b/httpobs/conf/__init__.py @@ -1,7 +1,7 @@ import configparser import os.path import sys -from os import cpu_count, environ +from os import environ # Read in the default config file if /etc/httpobs.conf doesn't already exist __dirname = os.path.abspath(os.path.dirname(__file__)) @@ -49,9 +49,6 @@ def __conf(section, param, type=None, default=None): ) API_URL = environ.get('HTTPOBS_API_URL') or __conf('api', 'url') -# Broker configuration -BROKER_URL = environ.get('HTTPOBS_BROKER_URL') or __conf('scanner', 'broker') - # Database configuration DATABASE_DB = environ.get('HTTPOBS_DATABASE_DB') or __conf('database', 'database') DATABASE_HOST = environ.get('HTTPOBS_DATABASE_HOST') or __conf('database', 'host') @@ -77,36 +74,6 @@ def __conf(section, param, type=None, default=None): # Scanner configuration SCANNER_ABORT_SCAN_TIME = int(environ.get('HTTPOBS_SCANNER_ABORT_SCAN_TIME') or __conf('scanner', 'abort_scan_time')) -SCANNER_ALLOW_KICKSTART = environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART') == 'yes' or __conf( - 'scanner', 'allow_kickstart', bool -) -SCANNER_ALLOW_KICKSTART_NUM_ABORTED = int( - environ.get('HTTPOBS_SCANNER_ALLOW_KICKSTART_NUM_ABORTED') or __conf('scanner', 'allow_kickstart_num_aborted') -) -SCANNER_BROKER_RECONNECTION_SLEEP_TIME = float( - environ.get('HTTPOBS_SCANNER_BROKER_RECONNECTION_SLEEP_TIME') or __conf('scanner', 'broker_reconnection_sleep_time') -) -SCANNER_CYCLE_SLEEP_TIME = float( - environ.get('HTTPOBS_SCANNER_CYCLE_SLEEP_TIME') or __conf('scanner', 'cycle_sleep_time') -) -SCANNER_DATABASE_RECONNECTION_SLEEP_TIME = float( - environ.get('HTTPOBS_SCANNER_DATABASE_RECONNECTION_SLEEP_TIME') - or __conf('scanner', 'database_reconnection_sleep_time') -) -SCANNER_MAINTENANCE_CYCLE_FREQUENCY = int( - environ.get('HTTPOBS_MAINTENANCE_CYCLE_FREQUENCY') or __conf('scanner', 'maintenance_cycle_frequency') -) -SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY = int( - environ.get('HTTPOBS_SCANNER_MATERIALIZED_VIEW_REFRESH_FREQUENCY') - or __conf('scanner', 'materialized_view_refresh_frequency') -) -SCANNER_MAX_CPU_UTILIZATION = int( - environ.get('HTTPOBS_SCANNER_MAX_CPU_UTILIZATION') or __conf('scanner', 'max_cpu_utilization') -) -SCANNER_MAX_LOAD_RATIO = int( - environ.get('HTTPOBS_SCANNER_MAX_LOAD_RATIO_PER_CPU') or __conf('scanner', 'max_load_ratio_per_cpu') -) -SCANNER_MAX_LOAD = cpu_count() * SCANNER_MAX_LOAD_RATIO SCANNER_MOZILLA_DOMAINS = [ domain.strip() for domain in (environ.get('HTTPOBS_SCANNER_MOZILLA_DOMAINS') or __conf('scanner', 'mozilla_domains')).split(',') diff --git a/httpobs/conf/httpobs.conf b/httpobs/conf/httpobs.conf index ec8bbe26..a4bb3d9b 100644 --- a/httpobs/conf/httpobs.conf +++ b/httpobs/conf/httpobs.conf @@ -25,15 +25,5 @@ user_agent = Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:98.0) Gecko/201001 [scanner] abort_scan_time = 1800 -allow_kickstart = no -allow_kickstart_num_aborted = 5 -broker = redis://localhost:6379/0 -broker_reconnection_sleep_time = 15 -cycle_sleep_time = .5 -database_reconnection_sleep_time = 5 -maintenance_cycle_frequency = 900 -materialized_view_refresh_frequency = 3600 -max_cpu_utilization = 90 -max_load_ratio_per_cpu = 3 mozilla_domains = mozilla,allizom,browserid,firefox,persona,taskcluster,webmaker pinned_domains = accounts.firefox.com,addons.mozilla.org,aus4.mozilla.org,aus5.mozilla.org,cdn.mozilla.org,services.mozilla.com From 605fcf79f3470e8d6646eca331d20782ecaa8c1b Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Thu, 4 Jan 
2024 13:25:15 +0000 Subject: [PATCH 20/31] reduce imports from scanner in database, remove unused database functions --- httpobs/database/__init__.py | 4 --- httpobs/database/database.py | 58 +++++++----------------------------- httpobs/scanner/tasks.py | 2 ++ 3 files changed, 12 insertions(+), 52 deletions(-) diff --git a/httpobs/database/__init__.py b/httpobs/database/__init__.py index b93a9df6..cffca495 100644 --- a/httpobs/database/__init__.py +++ b/httpobs/database/__init__.py @@ -1,7 +1,6 @@ from .database import ( get_cursor, insert_scan, - insert_scan_grade, insert_test_results, periodic_maintenance, refresh_materialized_views, @@ -14,13 +13,11 @@ select_star_from, select_test_results, update_scan_state, - update_scans_dequeue_scans, ) __all__ = [ 'get_cursor', 'insert_scan', - 'insert_scan_grade', 'insert_test_results', 'refresh_materialized_views', 'select_scan_host_history', @@ -33,5 +30,4 @@ 'select_test_results', 'update_scan_state', 'periodic_maintenance', - 'update_scans_dequeue_scans', ] diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 56be2edc..657896fd 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -19,15 +19,7 @@ DATABASE_USER, SCANNER_ABORT_SCAN_TIME, ) -from httpobs.scanner import ( - ALGORITHM_VERSION, - STATE_ABORTED, - STATE_FAILED, - STATE_FINISHED, - STATE_PENDING, - STATE_STARTING, -) -from httpobs.scanner.analyzer import NUM_TESTS +from httpobs.scanner import STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_RUNNING class SimpleDatabaseConnection: @@ -105,24 +97,10 @@ def get_cursor(): def insert_scan(site_id: int, hidden: bool = False) -> dict: with get_cursor() as cur: cur.execute( - """INSERT INTO scans (site_id, state, start_time, algorithm_version, tests_quantity, hidden) - VALUES (%s, %s, NOW(), %s, %s, %s) + """INSERT INTO scans (site_id, state, start_time, tests_quantity, hidden) + VALUES (%s, %s, NOW(), 0, %s) RETURNING *""", - (site_id, STATE_PENDING, ALGORITHM_VERSION, NUM_TESTS, hidden), - ) - - return dict(cur.fetchone()) - - -def insert_scan_grade(scan_id, scan_grade, scan_score) -> dict: - with get_cursor() as cur: - cur.execute( - """UPDATE scans - SET (grade, score) = - (%s, %s) - WHERE id = %s - RETURNING *""", - (scan_grade, scan_score, scan_id), + (site_id, STATE_RUNNING, hidden), ) return dict(cur.fetchone()) @@ -145,8 +123,10 @@ def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict: ) scan = data["scan"] + algorithm_version = scan["algorithm_version"] tests_failed = scan["tests_failed"] tests_passed = scan["tests_passed"] + tests_quantity = scan["tests_quantity"] grade = scan["grade"] score = scan["score"] likelihood_indicator = scan["likelihood_indicator"] @@ -157,8 +137,8 @@ def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict: cur.execute( """UPDATE scans SET (end_time, tests_failed, tests_passed, grade, score, likelihood_indicator, - state, response_headers, status_code) = - (NOW(), %s, %s, %s, %s, %s, %s, %s, %s) + state, response_headers, status_code, algorithm_version, tests_quantity) = + (NOW(), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) WHERE id = %s RETURNING *""", ( @@ -170,6 +150,8 @@ def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict: STATE_FINISHED, dumps(response_headers), status_code, + algorithm_version, + tests_quantity, scan_id, ), ) @@ -435,23 +417,3 @@ def update_scan_state(scan_id, state: str, error=None) -> dict: row = dict(cur.fetchone()) return row - - -def update_scans_dequeue_scans(num_to_dequeue: 
int = 0) -> dict: - with get_cursor() as cur: - cur.execute( - """UPDATE scans - SET state = %s - FROM ( - SELECT sites.domain, scans.site_id, scans.id AS scan_id, scans.state - FROM scans - INNER JOIN sites ON scans.site_id = sites.id - WHERE state = %s - LIMIT %s - FOR UPDATE) sub - WHERE scans.id = sub.scan_id - RETURNING sub.domain, sub.site_id, sub.scan_id""", - (STATE_STARTING, STATE_PENDING, num_to_dequeue), - ) - - return cur.fetchall() diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py index 350708d0..e5e55491 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/tasks.py @@ -1,3 +1,4 @@ +from httpobs.scanner import ALGORITHM_VERSION from httpobs.scanner.analyzer import NUM_TESTS, tests from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, get_grade_and_likelihood_for_score from httpobs.scanner.retriever import retrieve_all @@ -40,6 +41,7 @@ def scan(hostname: str, site_id: int, scan_id: int, headers: dict): return { "scan": { + "algorithm_version": ALGORITHM_VERSION, "grade": grade, "likelihood_indicator": likelihood_indicator, "response_headers": response_headers, From 2a5cfeabe2dc6e60e4f7d30e64bee24f9230ef02 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Thu, 4 Jan 2024 13:31:47 +0000 Subject: [PATCH 21/31] move states out of scanner --- httpobs/__init__.py | 10 ++++++++++ httpobs/database/database.py | 2 +- httpobs/scanner/__init__.py | 22 ---------------------- httpobs/website/api.py | 2 +- 4 files changed, 12 insertions(+), 24 deletions(-) diff --git a/httpobs/__init__.py b/httpobs/__init__.py index 3fdbf744..0344c530 100644 --- a/httpobs/__init__.py +++ b/httpobs/__init__.py @@ -1,2 +1,12 @@ SOURCE_URL = 'https://github.com/mozilla/http-observatory' VERSION = '0.9.3' + +# The various statuses +STATE_ABORTED = 'ABORTED' +STATE_FAILED = 'FAILED' +STATE_FINISHED = 'FINISHED' +STATE_PENDING = 'PENDING' +STATE_STARTING = 'STARTING' +STATE_RUNNING = 'RUNNING' + +STATES = (STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_PENDING, STATE_RUNNING, STATE_STARTING) diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 657896fd..2d003c4f 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -8,6 +8,7 @@ import psycopg2.extras import psycopg2.pool +from httpobs import STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_RUNNING from httpobs.conf import ( API_CACHED_RESULT_TIME, DATABASE_CA_CERT, @@ -19,7 +20,6 @@ DATABASE_USER, SCANNER_ABORT_SCAN_TIME, ) -from httpobs.scanner import STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_RUNNING class SimpleDatabaseConnection: diff --git a/httpobs/scanner/__init__.py b/httpobs/scanner/__init__.py index 3ac10ee3..81f393ba 100644 --- a/httpobs/scanner/__init__.py +++ b/httpobs/scanner/__init__.py @@ -1,24 +1,2 @@ # Current algorithm version ALGORITHM_VERSION = 2 - -# The various statuses -STATE_ABORTED = 'ABORTED' -STATE_FAILED = 'FAILED' -STATE_FINISHED = 'FINISHED' -STATE_PENDING = 'PENDING' -STATE_STARTING = 'STARTING' -STATE_RUNNING = 'RUNNING' - -STATES = (STATE_ABORTED, STATE_FAILED, STATE_FINISHED, STATE_PENDING, STATE_RUNNING, STATE_STARTING) - -__all__ = [ - 'ALGORITHM_VERSION', - 'NUM_TESTS', - 'STATES', - 'STATE_ABORTED', - 'STATE_FAILED', - 'STATE_FINISHED', - 'STATE_PENDING', - 'STATE_RUNNING', - 'STATE_STARTING', -] diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 98fded60..230ba304 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -6,8 +6,8 @@ from werkzeug.http import http_date import httpobs.database as 
database +from httpobs import STATE_FAILED, STATE_RUNNING, STATES from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN, DEVELOPMENT_MODE -from httpobs.scanner import STATE_FAILED, STATE_RUNNING, STATES from httpobs.scanner.grader import GRADES, get_score_description from httpobs.scanner.tasks import scan from httpobs.website import add_response_headers, sanitized_api_response From fa4e83f5895300511582f2e0db609654ff7c30f2 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Thu, 4 Jan 2024 16:14:33 +0000 Subject: [PATCH 22/31] merge scanner.local.scan into scanner.tasks.scan --- httpobs/database/database.py | 4 +- httpobs/scanner/local.py | 75 ++---------------------------------- httpobs/scanner/tasks.py | 52 ++++++++++++++++++++++--- httpobs/website/api.py | 6 +-- 4 files changed, 55 insertions(+), 82 deletions(-) diff --git a/httpobs/database/database.py b/httpobs/database/database.py index 2d003c4f..d9f0a65b 100644 --- a/httpobs/database/database.py +++ b/httpobs/database/database.py @@ -108,12 +108,12 @@ def insert_scan(site_id: int, hidden: bool = False) -> dict: def insert_test_results(site_id: int, scan_id: int, data: dict) -> dict: with get_cursor() as cur: - for test in data["tests"]: - name = test.pop('name') + for name, test in data["tests"].items(): expectation = test.pop('expectation') passed = test.pop('pass') result = test.pop('result') score_modifier = test.pop('score_modifier') + del test["score_description"] # Insert test result to the database cur.execute( diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index 73a53c96..dc0a82aa 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -1,73 +1,4 @@ -from httpobs.scanner.analyzer import NUM_TESTS, tests -from httpobs.scanner.grader import get_grade_and_likelihood_for_score, get_score_description -from httpobs.scanner.retriever import retrieve_all +from httpobs.scanner.tasks import scan - -def scan(hostname, **kwargs): - """Performs an Observatory scan, but doesn't require any database/redis - backing. Given the lowered security concerns due to not being a public - API, you can use this to scan arbitrary ports and paths. - - Args: - hostname (str): domain name for host to be scanned. Must not include - protocol (http://, https://) or port number (:80). - - Kwargs: - http_port (int): port to scan for HTTP, instead of 80 - https_port (int): port to be scanned for HTTPS, instead of 443 - path (str): path to scan, instead of "/" - verify (bool): whether to enable or disable certificate verification, - enabled by default. This can allow tested sites to pass the HSTS - and HPKP tests, even with self-signed certificates. - - cookies (dict): Cookies sent to the system being scanned. Matches the - requests cookie dict. - headers (dict): HTTP headers sent to the system being scanned. Format - matches the requests headers dict. - - Returns: - A dict representing the analyze (scan) and getScanResults (test) API call. Example: - - { - 'scan': { - 'grade': 'A' - ... - }, - 'test': { - 'content-security-policy': { - 'pass': True - ... - } - } - } - """ - - # Attempt to retrieve all the resources, not capturing exceptions - reqs = retrieve_all(hostname, **kwargs) - - # If we can't connect at all, let's abort the test - if reqs['responses']['auto'] is None: - return {'error': 'site down'} - - # Get all the results - results = [test(reqs) for test in tests] - for result in results: - result['score_description'] = get_score_description(result['result']) - - # Get the score, grade, etc. 
-    grades = get_grade_and_likelihood_for_score(100 + sum([result.get('score_modifier', 0) for result in results]))
-    tests_passed = sum([1 if result.get('pass') else 0 for result in results])
-
-    # Return the results
-    return {
-        'scan': {
-            'grade': grades[1],
-            'likelihood_indicator': grades[2],
-            'response_headers': dict(reqs['responses']['auto'].headers),
-            'score': grades[0],
-            'tests_failed': NUM_TESTS - tests_passed,
-            'tests_passed': tests_passed,
-            'tests_quantity': NUM_TESTS,
-        },
-        'tests': {result.pop('name'): result for result in results},
-    }
+from httpobs.scanner.tasks import scan
+
+# for backwards compatibility, so consumers can continue to use httpobs.scanner.local.scan
+__all__ = ["scan"]
diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/tasks.py
index e5e55491..9a14b183 100644
--- a/httpobs/scanner/tasks.py
+++ b/httpobs/scanner/tasks.py
@@ -1,17 +1,57 @@
 from httpobs.scanner import ALGORITHM_VERSION
 from httpobs.scanner.analyzer import NUM_TESTS, tests
-from httpobs.scanner.grader import MINIMUM_SCORE_FOR_EXTRA_CREDIT, get_grade_and_likelihood_for_score
+from httpobs.scanner.grader import (
+    MINIMUM_SCORE_FOR_EXTRA_CREDIT,
+    get_grade_and_likelihood_for_score,
+    get_score_description,
+)
 from httpobs.scanner.retriever import retrieve_all
 from httpobs.scanner.utils import sanitize_headers


-def scan(hostname: str, site_id: int, scan_id: int, headers: dict):
+def scan(hostname: str, **kwargs):
+    """Performs an Observatory scan.
+
+    Args:
+        hostname (str): domain name for host to be scanned. Must not include
+            protocol (http://, https://) or port number (:80).
+
+    Kwargs:
+        http_port (int): port to scan for HTTP, instead of 80
+        https_port (int): port to be scanned for HTTPS, instead of 443
+        path (str): path to scan, instead of "/"
+        verify (bool): whether to enable or disable certificate verification,
+            enabled by default. This can allow tested sites to pass the HSTS
+            and HPKP tests, even with self-signed certificates.
+
+        cookies (dict): Cookies sent to the system being scanned. Matches the
+            requests cookie dict.
+        headers (dict): HTTP headers sent to the system being scanned. Format
+            matches the requests headers dict.
+
+    Returns:
+        A dict representing the analyze (scan) and getScanResults (tests) API call. Example:
+
+        {
+            'scan': {
+                'grade': 'A'
+                ...
+            },
+            'tests': {
+                'content-security-policy': {
+                    'pass': True
+                    ...
+ } + } + } + """ + # Attempt to retrieve all the resources - reqs = retrieve_all(hostname, cookies=headers['cookies'], headers=headers['headers']) + reqs = retrieve_all(hostname, **kwargs) # If we can't connect at all, let's abort the test if reqs['responses']['auto'] is None: - return + return {'error': 'site down'} results = [test(reqs) for test in tests] response_headers = sanitize_headers(reqs["responses"]["auto"].headers) @@ -21,6 +61,8 @@ def scan(hostname: str, site_id: int, scan_id: int, headers: dict): score_with_extra_credit = uncurved_score = 100 for result in results: + result["score_description"] = get_score_description(result['result']) + passed = result.get("pass") score_modifier = result.get("score_modifier") @@ -51,5 +93,5 @@ def scan(hostname: str, site_id: int, scan_id: int, headers: dict): "tests_quantity": NUM_TESTS, "status_code": status_code, }, - "tests": results, + "tests": {result.pop("name"): result for result in results}, } diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 230ba304..f1028ae9 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -76,10 +76,10 @@ def api_post_scan_hostname(): headers = database.select_site_headers(hostname) try: - result = scan(hostname, site_id, scan_id, headers) + result = scan(hostname, headers=headers.get("headers", {}), cookies=headers.get("cookies", {})) - if result is None: - row = database.update_scan_state(scan_id, STATE_FAILED, error="site down") + if "error" in result: + row = database.update_scan_state(scan_id, STATE_FAILED, error=result["error"]) else: row = database.insert_test_results( site_id, From eeb938e166f76d4f801dabe014c6e350b9efa0a6 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Thu, 4 Jan 2024 16:36:44 +0000 Subject: [PATCH 23/31] move scanner.tasks.scan to scanner.scan --- httpobs/scanner/__init__.py | 5 +++-- httpobs/scanner/local.py | 2 +- httpobs/scanner/{tasks.py => scanner.py} | 4 +++- httpobs/website/api.py | 2 +- 4 files changed, 8 insertions(+), 5 deletions(-) rename httpobs/scanner/{tasks.py => scanner.py} (98%) diff --git a/httpobs/scanner/__init__.py b/httpobs/scanner/__init__.py index 81f393ba..bb1bf30c 100644 --- a/httpobs/scanner/__init__.py +++ b/httpobs/scanner/__init__.py @@ -1,2 +1,3 @@ -# Current algorithm version -ALGORITHM_VERSION = 2 +from .scanner import scan + +__all__ = ["scan"] diff --git a/httpobs/scanner/local.py b/httpobs/scanner/local.py index dc0a82aa..36485fac 100644 --- a/httpobs/scanner/local.py +++ b/httpobs/scanner/local.py @@ -1,4 +1,4 @@ -from httpobs.scanner.tasks import scan +from httpobs.scanner import scan # for backwards compatibility, so consumers can continue to use httpobs.scanner.local.scan __all__ = ["scan"] diff --git a/httpobs/scanner/tasks.py b/httpobs/scanner/scanner.py similarity index 98% rename from httpobs/scanner/tasks.py rename to httpobs/scanner/scanner.py index 9a14b183..ba0f4984 100644 --- a/httpobs/scanner/tasks.py +++ b/httpobs/scanner/scanner.py @@ -1,4 +1,3 @@ -from httpobs.scanner import ALGORITHM_VERSION from httpobs.scanner.analyzer import NUM_TESTS, tests from httpobs.scanner.grader import ( MINIMUM_SCORE_FOR_EXTRA_CREDIT, @@ -8,6 +7,9 @@ from httpobs.scanner.retriever import retrieve_all from httpobs.scanner.utils import sanitize_headers +# Current algorithm version +ALGORITHM_VERSION = 2 + def scan(hostname: str, **kwargs): """Performs an Observatory scan. 
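A minimal sketch of driving the relocated entry point after this patch: `scan()` now lives at `httpobs.scanner.scan` and returns the scan/tests dict described in the docstring above. The hostname, ports, path, cookies, and headers below are illustrative values, not part of the series:

```python
# Sketch: exercise httpobs.scanner.scan() directly (no database required).
# All argument values here are illustrative assumptions.
from httpobs.scanner import scan

result = scan(
    'observatory.mozilla.org',
    http_port=8080,            # scan HTTP on port 8080 instead of 80
    https_port=8443,           # scan HTTPS on port 8443 instead of 443
    path='/foo/bar',           # scan this path instead of "/"
    verify=False,              # tolerate self-signed certificates
    cookies={'foo': 'bar'},    # sent with every request
    headers={'X-Foo': 'bar'},
)

if 'error' in result:          # retrieval failed entirely, e.g. the site is down
    print('scan failed:', result['error'])
else:
    print(result['scan']['grade'], result['scan']['score'])
    for name, test in result['tests'].items():
        print(name, 'passed' if test['pass'] else 'failed')
```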
diff --git a/httpobs/website/api.py b/httpobs/website/api.py
index f1028ae9..ced312bf 100644
--- a/httpobs/website/api.py
+++ b/httpobs/website/api.py
@@ -8,8 +8,8 @@ import httpobs.database as database
 from httpobs import STATE_FAILED, STATE_RUNNING, STATES
 from httpobs.conf import API_ALLOW_VERBOSE_STATS_FROM_PUBLIC, API_COOLDOWN, DEVELOPMENT_MODE
+from httpobs.scanner import scan
 from httpobs.scanner.grader import GRADES, get_score_description
-from httpobs.scanner.tasks import scan
 from httpobs.website import add_response_headers, sanitized_api_response
 from httpobs.website.utils import valid_hostname

From f9c3b6d13a3c827f0223ddfa2b6e4bf0e328bea0 Mon Sep 17 00:00:00 2001
From: Leo McArdle
Date: Thu, 4 Jan 2024 16:58:52 +0000
Subject: [PATCH 24/31] update README, add httpobs-local-scan script to poetry

---
 README.md                                   | 96 +++++--------------
 .../scripts/{httpobs-local-scan => scan.py} | 11 ++-
 pyproject.toml                              |  3 +
 3 files changed, 33 insertions(+), 77 deletions(-)
 rename httpobs/scripts/{httpobs-local-scan => scan.py} (97%)

diff --git a/README.md b/README.md
index 18d578a8..5e9aa633 100644
--- a/README.md
+++ b/README.md
@@ -1,4 +1,4 @@
-# Mozilla HTTP Observatory - [![Build Status](https://travis-ci.org/april/http-observatory.svg?branch=master)](https://travis-ci.org/april/http-observatory) [![Requirements Status](https://requires.io/github/mozilla/http-observatory/requirements.svg?branch=master)](https://requires.io/github/mozilla/http-observatory/requirements/?branch=master)
+# Mozilla HTTP Observatory

 The Mozilla HTTP Observatory is a set of tools to analyze your website and inform you if you are utilizing the many available methods to secure it.

@@ -19,28 +19,40 @@ Sites can be scanned using:

 ## Contributing

 ### Prerequisites
-* Python 3.7
+* Python 3.11
 * Git
-* pip3
+* pip

 #### Notes
-These instructions assume that you have a working Python3.7 development environment with `pip3` installed and capable of building requirements, which may require installing an additional python OS package (`-dev`, `-devel`).
+These instructions assume that you have a working Python 3.11 development environment with `pip` installed and capable of building requirements, which may require installing an additional Python OS package (`-dev`, `-devel`).

-If this is not appropriate for your environment, you may install the appropriate requirements using your OS package manager (or other means) and skip the `pip3 -r requirements` command.
+```bash
+# Clone the code
+$ git clone https://github.com/mozilla/http-observatory.git
+$ cd http-observatory
+# Install poetry
+$ pip install poetry
+# Install the project dependencies and scripts
+$ poetry install
+# Activate the virtual environment
+$ poetry shell
+# Install the pre-commit hooks
+$ pre-commit install
+```

 ## Running a scan from the local codebase, without DB, for continuous integration
 ```bash
 # Install the HTTP Observatory
 $ git clone https://github.com/mozilla/http-observatory.git
 $ cd http-observatory
-$ pip3 install --upgrade .
-$ pip3 install --upgrade -r requirements.txt +$ pip install poetry +$ poetry install ``` -### Using the local scanner function calls +### Using the scanner function calls ```python ->>> from httpobs.scanner.local import scan +>>> from httpobs.scanner import scan >>> scan('observatory.mozilla.org') # a scan with default options >>> scan('observatory.mozilla.org', # all the custom options http_port=8080, # http server runs on port 8080 @@ -53,75 +65,11 @@ $ pip3 install --upgrade -r requirements.txt ### The same, but with the local CLI ```bash +$ poetry shell $ httpobs-local-scan --http-port 8080 --https-port 8443 --path '/foo/bar' \ --cookies '{"foo": "bar"}' --headers '{"X-Foo": "bar"}' --no-verify mozilla.org ``` -## Running a local scanner with Docker -* Install [Docker Toolbox](https://docs.docker.com/toolbox/overview/) and [VirtualBox](https://www.virtualbox.org/wiki/Downloads) - -```bash -# Install the HTTP Observatory client and requests library -$ git clone https://github.com/mozilla/http-observatory.git -$ cd http-observatory -$ pip3 install . -$ pip3 install --upgrade requests - -# Create docker machine -$ docker-machine create --driver virtualbox --virtualbox-disk-size "40000" http-observatory - -# Save the URL to the API in your .profile, .bash_profile, or whatever -$ echo export HTTPOBS_API_URL=http://$(docker-machine ip http-observatory):57001/api/v1 >> ~/.profile -$ . ~/.profile - -# Start up the docker instance and install all the pieces -$ eval $(docker-machine env http-observatory) -$ docker-compose up -d -``` - -## Creating a local installation (tested on Ubuntu 15) -``` -# Install git, postgresql, and redis -# sudo -s -# apt-get install -y git libpq-dev postgresql redis-server - -# Clone the repo -# cd /opt -# git clone https://github.com/mozilla/http-observatory.git -# cd http-observatory - -# Install the observatory and scanner -# pip install . -# pip3 install -r requirements.txt - -# Install the database -# su - postgres -$ createdb http_observatory -$ psql http_observatory < httpobs/database/schema.sql -$ psql http_observatory -http_observatory=# \password httpobsapi -http_observatory=# \password httpobsscanner -# vi /etc/postgresql/9.4/main/postgresql.conf (set max_connections = 512, shared_buffers = 256MB) -# service postgresql restart - -# Create the httpobs user, and log/pid directories -# useradd -m httpobs -# install -m 750 -o httpobs -g httpobs -d /var/run/httpobs /var/log/httpobs - -# Update the environmental variables -# su - httpobs -$ echo export HTTPOBS_API_URL="http://localhost:57001/api/v1" >> ~/.profile - -# Start the scanner -$ cd /opt/http-observatory -$ HTTPOBS_DATABASE_USER="httpobsscanner" HTTPOBS_DATABASE_PASS="....." \ - /opt/http-observatory/httpobs/scripts/httpobs-scan-worker - -# Start the API (in another terminal) -# HTTPOBS_DATABASE_USER="httpobsapi" HTTPOBS_DATABASE_PASS="....." 
\ - uwsgi --http :57001 --wsgi-file httpobs/website/main.py --processes 8 --callable app --master -``` - ## Authors * April King diff --git a/httpobs/scripts/httpobs-local-scan b/httpobs/scripts/scan.py similarity index 97% rename from httpobs/scripts/httpobs-local-scan rename to httpobs/scripts/scan.py index 6193a77e..7ce0011d 100755 --- a/httpobs/scripts/httpobs-local-scan +++ b/httpobs/scripts/scan.py @@ -5,9 +5,10 @@ from operator import itemgetter from urllib.parse import urlparse -import httpobs.scanner.local +import httpobs.scanner -if __name__ == "__main__": + +def main(): parser = argparse.ArgumentParser() # Add the various arguments @@ -56,7 +57,7 @@ args['verify'] = False # Get the scan results - r = httpobs.scanner.local.scan(**args) + r = httpobs.scanner.scan(**args) # print out the results to the command line if output_format == 'json': @@ -76,3 +77,7 @@ if score[1] > 0: score[1] = '+' + str(score[1]) # display 5 as +5 print(' {test:<30} [{modifier:>3}] {reason}'.format(test=score[0], modifier=score[1], reason=score[2])) + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml index 02f23edd..1776c86f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,6 +6,9 @@ license = "MPL-2.0" authors = ["April King "] maintainers = ["Leo McArdle "] +[tool.poetry.scripts] +httpobs-local-scan = 'httpobs.scripts.scan:main' + [tool.poetry.dependencies] python = "^3.11" beautifulsoup4 = "^4.12.2" From 875c228c911e31e17ec9115901e06a727feaadd3 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 10:05:37 +0000 Subject: [PATCH 25/31] add script to run flask and document in README --- README.md | 7 ++++++- httpobs/website/main.py | 6 +++++- pyproject.toml | 1 + 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5e9aa633..a65b5aef 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Sites can be scanned using: * [observatory-cli](https://github.com/mozilla/observatory-cli) - the official node.js command line interface * [java-http-observatory-api](https://github.com/stoennies/java-http-observatory-api) - a third party java library and command line interface -## Contributing +## Development ### Prerequisites * Python 3.11 @@ -39,6 +39,11 @@ $ poetry install $ poetry shell # Install the pre-commit hooks $ pre-commit install +# copy and edit the config file +$ cp httpobs/conf/httpobs.conf ~/.httpobs.conf +$ nano ~/.httpobs.conf +# start the dev server +$ httpobs-server ``` ## Running a scan from the local codebase, without DB, for continuous integration diff --git a/httpobs/website/main.py b/httpobs/website/main.py index d2af069f..b7e6ad76 100644 --- a/httpobs/website/main.py +++ b/httpobs/website/main.py @@ -26,5 +26,9 @@ def main() -> str: return 'Welcome to the HTTP Observatory!' 
-if __name__ == '__main__': +def run(): app.run(debug=DEVELOPMENT_MODE, port=API_PORT) + + +if __name__ == '__main__': + run() diff --git a/pyproject.toml b/pyproject.toml index 1776c86f..89456591 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,7 @@ maintainers = ["Leo McArdle "] [tool.poetry.scripts] httpobs-local-scan = 'httpobs.scripts.scan:main' +httpobs-server = "httpobs.website.main:run" [tool.poetry.dependencies] python = "^3.11" From 848bba469b30c1bb4fdccdef3aa02bb3c270ed16 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 10:18:25 +0000 Subject: [PATCH 26/31] don't save exception in db/send in response --- httpobs/website/api.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/httpobs/website/api.py b/httpobs/website/api.py index ced312bf..83894f01 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -86,15 +86,15 @@ def api_post_scan_hostname(): scan_id, result, ) - except Exception as e: + except: # If we are unsuccessful, close out the scan in the database - row = database.update_scan_state(scan_id, STATE_FAILED, error=repr(e)) + row = database.update_scan_state(scan_id, STATE_FAILED) # Print the exception to stderr if we're in dev if DEVELOPMENT_MODE: import traceback - print("Error detected in scan for : " + hostname) + print("Error detected in scan for: " + hostname) traceback.print_exc(file=sys.stderr) else: return { From 64d748b873638c3fede542f8fdf31a56d7ee98ee Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 10:31:57 +0000 Subject: [PATCH 27/31] run all tests, add to README, update test_retriever --- .github/workflows/test.yml | 2 +- README.md | 6 ++++++ httpobs/tests/unittests/test_retriever.py | 12 ++++++------ 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index edfa90f7..f672202d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -35,4 +35,4 @@ jobs: - name: Run pre-commit against all files run: pre-commit run --all-files - name: Run nose tests - run: nosetests httpobs/tests -e insert_test_result -e scored_test -e select_test_results -e test_retrieve --with-coverage --cover-package=httpobs + run: nosetests httpobs/tests --with-coverage --cover-package=httpobs diff --git a/README.md b/README.md index a65b5aef..d2e77252 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,12 @@ $ nano ~/.httpobs.conf $ httpobs-server ``` +### Running tests + +```bash +$ nosetests httpobs/tests --with-coverage --cover-package=httpobs +``` + ## Running a scan from the local codebase, without DB, for continuous integration ```bash # Install the HTTP Observatory diff --git a/httpobs/tests/unittests/test_retriever.py b/httpobs/tests/unittests/test_retriever.py index a60084dd..6107c8ec 100644 --- a/httpobs/tests/unittests/test_retriever.py +++ b/httpobs/tests/unittests/test_retriever.py @@ -22,10 +22,10 @@ def test_retrieve_non_existent_domain(self): self.assertEquals(domain, reqs['hostname']) self.assertEquals({}, reqs['resources']) - def test_retrieve_mozilla(self): - reqs = retrieve_all('mozilla.org') + def test_retrieve_mdn(self): + reqs = retrieve_all('developer.mozilla.org') - # Various things we know about mozilla.org + # Various things we know about developer.mozilla.org self.assertIsNotNone(reqs['resources']['__path__']) self.assertIsNotNone(reqs['resources']['/contribute.json']) self.assertIsNotNone(reqs['resources']['/robots.txt']) @@ -38,12 +38,12 @@ def test_retrieve_mozilla(self): 
self.assertIsInstance(reqs['responses']['https'], requests.Response) self.assertIsInstance(reqs['session'], requests.Session) - self.assertEquals(reqs['hostname'], 'mozilla.org') + self.assertEquals(reqs['hostname'], 'developer.mozilla.org') self.assertEquals('text/html', reqs['responses']['auto'].headers['Content-Type'][0:9]) - self.assertEquals(2, len(reqs['responses']['auto'].history)) + self.assertEquals(1, len(reqs['responses']['auto'].history)) self.assertEquals(200, reqs['responses']['auto'].status_code) - self.assertEquals('https://www.mozilla.org/en-US/', reqs['responses']['auto'].url) + self.assertEquals('https://developer.mozilla.org/en-US/', reqs['responses']['auto'].url) def test_retrieve_invalid_cert(self): reqs = retrieve_all('expired.badssl.com') From fd7557d251019d182f236640275cce30700106ab Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 10:49:12 +0000 Subject: [PATCH 28/31] cache poetry and pre-commit in github action --- .github/workflows/test.yml | 28 ++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f672202d..9b31cbf0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -12,27 +12,31 @@ name: Run tests jobs: test: - name: nose tests + name: lint and test runs-on: ubuntu-latest strategy: matrix: - python-version: [ '3.11' ] + python-version: ["3.11"] env: HTTPOBS_DATABASE_HOST: fakehost HTTPOBS_DATABASE_PASS: foo HTTPOBS_DATABASE_USER: bar steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - name: Install poetry + run: pipx install poetry + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - pip install poetry - poetry config virtualenvs.create false - poetry install + cache: "poetry" + - run: poetry install + + - uses: actions/cache@v3 + with: + path: ~/.cache/pre-commit + key: pre-commit|${{ hashFiles('.pre-commit-config.yaml') }} - name: Run pre-commit against all files - run: pre-commit run --all-files + run: poetry run pre-commit run --all-files + - name: Run nose tests - run: nosetests httpobs/tests --with-coverage --cover-package=httpobs + run: poetry run nosetests httpobs/tests --with-coverage --cover-package=httpobs From 66b200b9932e935ded5b5286f13d59abc5b8e6df Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 11:05:19 +0000 Subject: [PATCH 29/31] fix: fail fast if attempting to validate empty hostname --- httpobs/website/api.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/httpobs/website/api.py b/httpobs/website/api.py index 83894f01..9ef3b8fd 100644 --- a/httpobs/website/api.py +++ b/httpobs/website/api.py @@ -26,11 +26,13 @@ def api_post_scan_hostname(): # TODO: Allow people to accidentally use https://mozilla.org and convert to mozilla.org # Get the hostname - hostname = request.args.get('host', '').lower() + hostname = request.args.get('host', '').lower().strip() # Fail if it's not a valid hostname (not in DNS, not a real hostname, etc.) ip = True if valid_hostname(hostname) is None else False - hostname = valid_hostname(hostname) or valid_hostname('www.' + hostname) # prepend www. if necessary + hostname = valid_hostname(hostname) or ( + valid_hostname('www.' + hostname) if hostname else False + ) # prepend www. 
if necessary if ip: return { From 46bb16a21e1aa25ff9f69997520347b50499fc57 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 11:23:14 +0000 Subject: [PATCH 30/31] update contribute.json --- httpobs/docs/contribute.json | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/httpobs/docs/contribute.json b/httpobs/docs/contribute.json index 580dbcb9..e8ef8cce 100644 --- a/httpobs/docs/contribute.json +++ b/httpobs/docs/contribute.json @@ -4,14 +4,14 @@ "repository": { "url": "https://github.com/mozilla/http-observatory", "license": "MPL2", - "tests": "https://travis-ci.org/mozilla/http-observatory" + "tests": "https://github.com/mozilla/http-observatory/actions" }, "participate": { "home": "https://observatory.mozilla.org/", "docs": "https://github.com/mozilla/http-observatory/tree/master/httpobs/docs", - "irc": "irc://irc.mozilla.org/#observatory", - "irc-contacts": [ - "April" + "slack": "https://mozilla.slack.com/", + "slack-contacts": [ + "leo" ] }, "bugs": { @@ -21,8 +21,8 @@ }, "urls": { "prod": "https://observatory.mozilla.org/", - "stage": "https://mozilla.github.io/http-observatory-website/", - "dev": "https://mozilla.github.io/http-observatory-website/" + "stage": "https://observatory.allizom.org/", + "dev": "https://observatory.allizom.org/" }, "keywords": [ "python", From 5b9356b7fe0749664dc2c2ee1d6e253b5823eb82 Mon Sep 17 00:00:00 2001 From: Leo McArdle Date: Fri, 5 Jan 2024 12:23:41 +0000 Subject: [PATCH 31/31] add httpobs-regen-hsts-preload script to poetry config --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 89456591..d7ae612e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,6 +9,7 @@ maintainers = ["Leo McArdle "] [tool.poetry.scripts] httpobs-local-scan = 'httpobs.scripts.scan:main' httpobs-server = "httpobs.website.main:run" +httpobs-regen-hsts-preload = "httpobs.scanner.utils:retrieve_store_hsts_preload_list" [tool.poetry.dependencies] python = "^3.11"
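With the series fully applied, pyproject.toml declares three console scripts: `httpobs-local-scan`, `httpobs-server`, and `httpobs-regen-hsts-preload`. A quick sanity check, sketched under the assumption that the package has been installed into the active environment with `poetry install`, is to confirm that each declared entry point resolves to an importable callable:

```python
# Sketch: verify the console-script entry points declared in pyproject.toml
# resolve to real callables. Assumes the package is installed locally.
from importlib.metadata import entry_points

for ep in entry_points(group='console_scripts'):
    if ep.name.startswith('httpobs-'):
        func = ep.load()  # raises if the target module or attribute is missing
        print(f'{ep.name} -> {ep.value} (callable: {callable(func)})')
```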