diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..b2563f4 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# Auto detect script files and perform LF normalization. +*.sh text eol=lf diff --git a/.github/workflows/python-lint/action.yaml b/.github/workflows/python-lint/action.yaml index 2e80858..7021af2 100644 --- a/.github/workflows/python-lint/action.yaml +++ b/.github/workflows/python-lint/action.yaml @@ -55,44 +55,38 @@ runs: key: ${{ runner.os }}-pip restore-keys: ${{ runner.os }}-pip - #---------------------------------------------- - # Install linters - - name: Install linters - shell: bash - run: poetry run pip install pylint flake8 black isort mypy ruff - #---------------------------------------------- # Run Pylint - name: Pylint shell: bash - run: poetry run pylint . --ignore=.venv --disable=C,W1203,R0902,R0913,R0914,R0917,R0801 + run: poetry run pylint . #---------------------------------------------- # Run Flake8 - name: Flake8 shell: bash - run: poetry run poetry run flake8 . --exclude=.venv --ignore=E501 + run: poetry run flake8 . #---------------------------------------------- # Run Black - name: Black shell: bash - run: poetry run black . --exclude=.venv --check + run: poetry run black . --check #---------------------------------------------- # Run Isort - name: Isort shell: bash - run: poetry run isort . --skip .venv --check-only + run: poetry run isort . --check-only #---------------------------------------------- # Run Mypy - name: Mypy shell: bash - run: poetry run mypy . --install-types --non-interactive --exclude .venv + run: poetry run mypy . --install-types --non-interactive #---------------------------------------------- # Run Ruff - name: Ruff shell: bash - run: poetry run ruff check . --exclude .venv + run: poetry run ruff check . diff --git a/CHANGELOG.md b/CHANGELOG.md index aaa8900..05b176d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,12 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.3.0] - 2025-02-02 + +### Added + +[#31](https://github.com/ssenart/gazpar2haws/issues/31): Cost integration. + ## [0.2.1] - 2025-01-24 ### Fixed diff --git a/README.md b/README.md index aa49faa..bd4176c 100644 --- a/README.md +++ b/README.md @@ -180,6 +180,262 @@ The history is uploaded on the entities with names: `${name}` is 'gazpar2haws' defined in the above configuration file. It can be replaced by any other name. +### Cost configuration + +Gazpar2HAWS is able to compute and publish cost history to Home Assistant. + +The cost computation is based in gas prices defined in the configuration files. + +The section 'Pricing' is broken into 5 sub-sections: +- vat: Value added tax definition. +- consumption_prices: All the gas price history in €/kWh. +- subscription_prices: The subscription prices in €/month (or year). +- transport_prices: The fixed prices in €/month (or year) to transport the gas. +- energy_taxes: Various taxes on energy in €/kWh. + +Below, many examples illustrates how to use pricing configuration for use cases from the simplest to the most complex. + + +Example 1: A fixed consumption price +--- + +The given price applies at the given date, after and before. + +The default unit is € per kWh. 
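+For instance, with a hypothetical consumption of 100 kWh at the 0.07790 €/kWh price used in the example below, the cost is simply 100 × 0.07790 = 7.79 €.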
+ +**Formula:** +```math +cost[€] = quantity[kWh] * price[€/kWh] +``` + + +```yaml +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. +``` + +Example 2: A fixed consumption price in another unit +--- + +*value_unit* is the price unit (default: €). +*base_unit* is the denominator unit (default: kWh). + +**Formula:** +```math +cost[€] = \frac{quantity[kWh] * price[¢/MWh] * converter\_factor[¢->€]} {converter\_factor[MWh->kWh]} +``` + + +```yaml +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 7790.0 # Unit is now ¢/MWh. + value_unit: "¢" + base_unit: "MWh" +``` + +Example 3: Multiple prices over time +--- + +```yaml +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + - start_date: "2024-01-01" + value: 0.06888 # Default unit is €/kWh. +``` + +Price is 0.07790 before 2024-01-01. + +Price is 0.06888 on 2024-01-01 and after. + + +Example 4: Price is given excluding tax +--- + +The *normal* value added tax (*vat*) rate is 20%. + +```yaml +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. +``` + +**Formula:** +```math +cost[€] = quantity[kWh] * price[€/kWh] * (1 + vat[normal]) +``` + +Example 5: Subscription price +--- + +A fixed montly subscription is due over consumption. + +Subscription *vat* tax may be different than the consumption *vat* tax. + +```yaml +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: "reduced" +``` + +**Formula:** +```math +cost[€] = quantity[kWh] * cons\_price[€/kWh] * (1 + vat[normal]) + sub\_price * (1 + vat[reduced]) +``` + + +Example 6: Transport price +--- + +A fixed yearly transport may be charged as well. + +```yaml +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced +``` +**Formula:** +```math +cost[€] = quantity[kWh] * cons\_price[€/kWh] * (1 + vat[normal]) + trans\_price * (1 + vat[reduced]) +``` + +Example 7: Energy taxes +--- + +Consumption may be taxed by additional taxes (known as energy taxes). + +```yaml +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal +``` +**Formula:** +```math +cost[€] = quantity[kWh] * (cons\_price[€/kWh] + ener\_taxes[€/kWh])* (1 + vat[normal]) +``` + +Example 8: All in one +--- + +In the price list, the first item properties are propagated to the next items in the list. If their values does not change, it is not required to repeat them. + +```yaml +pricing: + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + - start_date: "2023-10-01" + value: 0.06333 + - start_date: "2023-11-01" + value: 0.06716 + - start_date: "2023-12-01" + value: 0.07235 + - start_date: "2024-01-01" + value: 0.06888 + - start_date: "2024-02-01" + value: 0.05972 + - start_date: "2024-03-01" + value: 0.05506 + - start_date: "2024-04-01" + value: 0.04842 + - start_date: "2025-01-01" + value: 0.07807 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 +``` + ### Environment variable for Docker In a Docker environment, the configurations files are instantiated by replacing the environment variables below in the template files: diff --git a/addons/gazpar2haws/DOCS.md b/addons/gazpar2haws/DOCS.md index 1a98577..5107d6e 100644 --- a/addons/gazpar2haws/DOCS.md +++ b/addons/gazpar2haws/DOCS.md @@ -33,3 +33,251 @@ devices: | devices[].timezone | Timezone of the GrDF data | No | Europe/Paris | | devices[].last_days | Number of days of history data to retrieve | No | 365 days | | devices[].reset | Rebuild the history. If true, the data will be reset before the first data retrieval. 
If false, the data will be kept and new data will be added | No | false | + +## Cost configuration + +Gazpar2HAWS is able to compute and publish cost history to Home Assistant. + +The cost computation is based in gas prices defined in the configuration files. + +The pricing configuration is broken into 5 sections: +- vat: Value added tax definition. +- consumption_prices: All the gas price history in €/kWh. +- subscription_prices: The subscription prices in €/month (or year). +- transport_prices: The fixed prices in €/month (or year) to transport the gas. +- energy_taxes: Various taxes on energy in €/kWh. + +Below, many examples illustrates how to use pricing configuration for use cases from the simplest to the most complex. + + +Example 1: A fixed consumption price +--- + +The given price applies at the given date, after and before. + +The default unit is € per kWh. + +**Formula:** +```math +cost[€] = quantity[kWh] * price[€/kWh] +``` + + +```yaml +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. +``` + +Example 2: A fixed consumption price in another unit +--- + +*value_unit* is the price unit (default: €). +*base_unit* is the denominator unit (default: kWh). + +**Formula:** +```math +cost[€] = \frac{quantity[kWh] * price[¢/MWh] * converter\_factor[¢->€]} {converter\_factor[MWh->kWh]} +``` + + +```yaml +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 7790.0 # Unit is now ¢/MWh. + value_unit: "¢" + base_unit: "MWh" +``` + +Example 3: Multiple prices over time +--- + +```yaml +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + - start_date: "2024-01-01" + value: 0.06888 # Default unit is €/kWh. +``` + +Price is 0.07790 before 2024-01-01. + +Price is 0.06888 on 2024-01-01 and after. + + +Example 4: Price is given excluding tax +--- + +The *normal* value added tax (*vat*) rate is 20%. + +```yaml +vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. +``` + +**Formula:** +```math +cost[€] = quantity[kWh] * price[€/kWh] * (1 + vat[normal]) +``` + +Example 5: Subscription price +--- + +A fixed montly subscription is due over consumption. + +Subscription *vat* tax may be different than the consumption *vat* tax. + +```yaml +vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. +subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: "reduced" +``` + +**Formula:** +```math +cost[€] = quantity[kWh] * cons\_price[€/kWh] * (1 + vat[normal]) + sub\_price * (1 + vat[reduced]) +``` + + +Example 6: Transport price +--- + +A fixed yearly transport may be charged as well. + +```yaml +vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. +transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced +``` +**Formula:** +```math +cost[€] = quantity[kWh] * cons\_price[€/kWh] * (1 + vat[normal]) + trans\_price * (1 + vat[reduced]) +``` + +Example 7: Energy taxes +--- + +Consumption may be taxed by additional taxes (known as energy taxes). + +```yaml +vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. +energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal +``` +**Formula:** +```math +cost[€] = quantity[kWh] * (cons\_price[€/kWh] + ener\_taxes[€/kWh])* (1 + vat[normal]) +``` + +Example 8: All in one +--- + +In the price list, the first item properties are propagated to the next items in the list. If their values does not change, it is not required to repeat them. + +```yaml +vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 +consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + - start_date: "2023-10-01" + value: 0.06333 + - start_date: "2023-11-01" + value: 0.06716 + - start_date: "2023-12-01" + value: 0.07235 + - start_date: "2024-01-01" + value: 0.06888 + - start_date: "2024-02-01" + value: 0.05972 + - start_date: "2024-03-01" + value: 0.05506 + - start_date: "2024-04-01" + value: 0.04842 + - start_date: "2025-01-01" + value: 0.07807 +subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 +transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced +energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 +``` diff --git a/addons/gazpar2haws/Dockerfile b/addons/gazpar2haws/Dockerfile index 9e33ab2..887b14c 100644 --- a/addons/gazpar2haws/Dockerfile +++ b/addons/gazpar2haws/Dockerfile @@ -5,12 +5,15 @@ FROM $BUILD_FROM # Gazpar2HAWS version to install. ARG GAZPAR2HAWS_VERSION +# gettext is required for envsubst RUN apk add --no-cache gettext + +# yq is required for the configuration file: yaml to json RUN apk add --no-cache yq ENV PIP_BREAK_SYSTEM_PACKAGES=1 -RUN pip3 install --no-cache-dir gazpar2haws==${GAZPAR2HAWS_VERSION} +RUN pip3 install --no-cache-dir --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ gazpar2haws==${GAZPAR2HAWS_VERSION} WORKDIR /app diff --git a/addons/gazpar2haws/build.yaml b/addons/gazpar2haws/build.yaml index 59d7441..d8ac1da 100644 --- a/addons/gazpar2haws/build.yaml +++ b/addons/gazpar2haws/build.yaml @@ -11,4 +11,4 @@ labels: org.opencontainers.image.source: "https://github.com/ssenart/gazpar2haws" org.opencontainers.image.licenses: "MIT" args: - GAZPAR2HAWS_VERSION: "0.2.1" \ No newline at end of file + GAZPAR2HAWS_VERSION: "0.3.0.dev14" \ No newline at end of file diff --git a/addons/gazpar2haws/config.yaml b/addons/gazpar2haws/config.yaml index 1edef2d..e53dbfa 100644 --- a/addons/gazpar2haws/config.yaml +++ b/addons/gazpar2haws/config.yaml @@ -1,6 +1,6 @@ name: "Gazpar2HAWS" description: "Gazpar2HAWS is a gateway that reads data history from the GrDF (French gas provider) meter and send it to Home Assistant using WebSocket interface." -version: "0.2.1" +version: "0.3.0.dev14" slug: "gazpar2haws" init: false arch: @@ -14,20 +14,94 @@ hassio_api: true options: scan_interval: 480 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). devices: - - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.. + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.{{name}}. username: "" # Email address used to connect to the GrDF website. password: "" # Password used to connect to the GrDF website. pce_identifier: "" # PCE identifier of the meter. It should be a positive integer. timezone: "Europe/Paris" # Timezone of the data. It should be the same as the Home Assistant timezone. last_days: 365 # Number of days of data to retrieve reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 schema: scan_interval: int # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). devices: - - name: str # Name of the device in home assistant. It will be used as the entity_id: sensor.. + - name: str # Name of the device in home assistant. It will be used as the entity_id: sensor.{{name}}. username: email # Email address used to connect to the GrDF website. password: password # Password used to connect to the GrDF website. pce_identifier: match(^0|[1-9][0-9]*$) # PCE identifier of the meter. It should be a positive integer. timezone: str? # Timezone of the data. It should be the same as the Home Assistant timezone. last_days: int? # Number of days of data to retrieve. reset: bool? # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + vat: + - id: str # Identifier of the VAT rate. + start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the VAT rate. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the VAT rate. Format is "YYYY-MM-DD". + value: float # Value of the VAT rate. + consumption_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? # Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: Wh, kWh, MWh, m³, l + vat_id: str? # Identifier of the VAT rate. + subscription_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? # Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: day, month, year + vat_id: str? # Identifier of the VAT rate. + transport_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? # Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: day, month, year + vat_id: str? # Identifier of the VAT rate. + energy_taxes: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the tax. + value_unit: str? # Unit of the tax: €, ¢. + base_unit: str? # Base unit of the tax: Wh, kWh, MWh, m³, l + vat_id: str? # Identifier of the VAT rate. diff --git a/addons/gazpar2haws/config.yaml.template b/addons/gazpar2haws/config.yaml.template index a337db8..48ef9ac 100644 --- a/addons/gazpar2haws/config.yaml.template +++ b/addons/gazpar2haws/config.yaml.template @@ -14,20 +14,94 @@ hassio_api: true options: scan_interval: 480 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). devices: - - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. 
+ - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.{{name}}. username: "" # Email address used to connect to the GrDF website. password: "" # Password used to connect to the GrDF website. pce_identifier: "" # PCE identifier of the meter. It should be a positive integer. timezone: "Europe/Paris" # Timezone of the data. It should be the same as the Home Assistant timezone. last_days: 365 # Number of days of data to retrieve reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 schema: scan_interval: int # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). devices: - - name: str # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + - name: str # Name of the device in home assistant. It will be used as the entity_id: sensor.{{name}}. username: email # Email address used to connect to the GrDF website. password: password # Password used to connect to the GrDF website. pce_identifier: match(^0|[1-9][0-9]*$) # PCE identifier of the meter. It should be a positive integer. timezone: str? # Timezone of the data. It should be the same as the Home Assistant timezone. last_days: int? # Number of days of data to retrieve. reset: bool? # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + vat: + - id: str # Identifier of the VAT rate. + start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the VAT rate. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the VAT rate. Format is "YYYY-MM-DD". + value: float # Value of the VAT rate. + consumption_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? # Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: Wh, kWh, MWh, m³, l + vat_id: str? # Identifier of the VAT rate. + subscription_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? 
# Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: day, month, year + vat_id: str? # Identifier of the VAT rate. + transport_prices: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the price. + value_unit: str? # Unit of the price: €, ¢. + base_unit: str? # Base unit of the price: day, month, year + vat_id: str? # Identifier of the VAT rate. + energy_taxes: + - start_date: match(^\d{4}-\d{2}-\d{2}$) # Start date of the price. Format is "YYYY-MM-DD". + end_date: match(^\d{4}-\d{2}-\d{2}$)? # End date of the price. Format is "YYYY-MM-DD". + value: float # Value of the tax. + value_unit: str? # Unit of the tax: €, ¢. + base_unit: str? # Base unit of the tax: Wh, kWh, MWh, m³, l + vat_id: str? # Identifier of the VAT rate. diff --git a/addons/gazpar2haws/rootfs/app/config/configuration.template.yaml b/addons/gazpar2haws/rootfs/app/config/configuration.template.yaml index 69f3267..d9f2929 100644 --- a/addons/gazpar2haws/rootfs/app/config/configuration.template.yaml +++ b/addons/gazpar2haws/rootfs/app/config/configuration.template.yaml @@ -11,3 +11,5 @@ homeassistant: port: "${HOMEASSISTANT_PORT}" endpoint: "${HOMEASSISTANT_ENDPOINT}" token: "${HOMEASSISTANT_TOKEN}" + +${PRICING_CONFIG} diff --git a/addons/gazpar2haws/rootfs/app/run.sh b/addons/gazpar2haws/rootfs/app/run.sh index 1f10338..14cab07 100644 --- a/addons/gazpar2haws/rootfs/app/run.sh +++ b/addons/gazpar2haws/rootfs/app/run.sh @@ -1,9 +1,31 @@ #!/usr/bin/with-contenv bashio +# Location of the Add-on configuration file +CONFIG_PATH=/data/options.json + # Load the Add-on configuration in JSON and reformat it to YAML. -GRDF_JSON="{ 'grdf': $(bashio::addon.config) }" +SCAN_INTERVAL_JSON=$(jq --raw-output '.scan_interval // empty' $CONFIG_PATH) + +DEVICES_JSON=$(jq --raw-output '.devices // empty' $CONFIG_PATH) + +GRDF_JSON="{ 'grdf': { 'scan_interval': $SCAN_INTERVAL_JSON, 'devices': $DEVICES_JSON } }" + GRDF_CONFIG=$(echo $GRDF_JSON | yq -P) +VAT_JSON=$(jq --raw-output '.vat // empty' $CONFIG_PATH) + +CONSUMPTION_PRICES_JSON=$(jq --raw-output '.consumption_prices // empty' $CONFIG_PATH) + +SUBSCRIPTION_PRICES_JSON=$(jq --raw-output '.subscription_prices // empty' $CONFIG_PATH) + +TRANSPORT_PRICES_JSON=$(jq --raw-output '.transport_prices // empty' $CONFIG_PATH) + +ENERGY_TAXES_JSON=$(jq --raw-output '.energy_taxes // empty' $CONFIG_PATH) + +PRICING_JSON="{ 'pricing': { 'vat': $VAT_JSON , 'consumption_prices': $CONSUMPTION_PRICES_JSON, 'subscription_prices': $SUBSCRIPTION_PRICES_JSON, 'transport_prices': $TRANSPORT_PRICES_JSON, 'energy_taxes': $ENERGY_TAXES_JSON } }" + +PRICING_CONFIG=$(echo $PRICING_JSON | yq -P) + # Home Assistant configuration for Add-on HOMEASSISTANT_HOST=supervisor HOMEASSISTANT_PORT=80 @@ -11,14 +33,15 @@ HOMEASSISTANT_ENDPOINT=/core/websocket HOMEASSISTANT_TOKEN=${SUPERVISOR_TOKEN} # Display environment variables -bashio::log.info "GRDF_CONFIG: ${GRDF_CONFIG}" +# bashio::log.info "GRDF_CONFIG: ${GRDF_CONFIG}" +# bashio::log.info "PRICING_CONFIG: ${PRICING_CONFIG}" bashio::log.info "HOMEASSISTANT_HOST: ${HOMEASSISTANT_HOST}" bashio::log.info "HOMEASSISTANT_PORT: ${HOMEASSISTANT_PORT}" bashio::log.info "HOMEASSISTANT_ENDPOINT: ${HOMEASSISTANT_ENDPOINT}" bashio::log.info "HOMEASSISTANT_TOKEN: ${HOMEASSISTANT_TOKEN}" # Export environment variables -export GRDF_CONFIG HOMEASSISTANT_HOST HOMEASSISTANT_PORT HOMEASSISTANT_ENDPOINT 
HOMEASSISTANT_TOKEN +export GRDF_CONFIG PRICING_CONFIG HOMEASSISTANT_HOST HOMEASSISTANT_PORT HOMEASSISTANT_ENDPOINT HOMEASSISTANT_TOKEN # Instantiate the template config if [ ! -e /app/config/configuration.yaml ]; then diff --git a/config/configuration.template.yaml b/config/configuration.template.yaml index 5517d80..f15740e 100644 --- a/config/configuration.template.yaml +++ b/config/configuration.template.yaml @@ -7,7 +7,7 @@ logging: grdf: scan_interval: ${GRDF_SCAN_INTERVAL} # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). devices: - - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.{{name}}. data_source: "json" # Data source to use: "json" | "excel" | "test". Default is "json". as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". username: "!secret grdf.username" @@ -23,3 +23,62 @@ homeassistant: port: "!secret homeassistant.port" endpoint: "${HOMEASSISTANT_ENDPOINT}" token: "!secret homeassistant.token" + +pricing: + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + - start_date: "2023-10-01" + value: 0.06333 + - start_date: "2023-11-01" + value: 0.06716 + - start_date: "2023-12-01" + value: 0.07235 + - start_date: "2024-01-01" + value: 0.06888 + - start_date: "2024-02-01" + value: 0.05972 + - start_date: "2024-03-01" + value: 0.05506 + - start_date: "2024-04-01" + value: 0.04842 + - start_date: "2025-01-01" + value: 0.07807 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 diff --git a/config/configuration.yaml b/config/configuration.yaml index 58dad82..a39c28b 100644 --- a/config/configuration.yaml +++ b/config/configuration.yaml @@ -20,3 +20,62 @@ homeassistant: port: "!secret homeassistant.port" endpoint: "/api/websocket" token: "!secret homeassistant.token" + +pricing: + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + - start_date: "2023-10-01" + value: 0.06333 + - start_date: "2023-11-01" + value: 0.06716 + - start_date: "2023-12-01" + value: 0.07235 + - start_date: "2024-01-01" + value: 0.06888 + - start_date: "2024-02-01" + value: 0.05972 + - start_date: "2024-03-01" + value: 0.05506 + - start_date: "2024-04-01" + value: 0.04842 + - start_date: "2025-01-01" + value: 0.07807 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 diff --git a/gazpar2haws/__main__.py b/gazpar2haws/__main__.py index 49018d3..f06c64a 100644 --- a/gazpar2haws/__main__.py +++ b/gazpar2haws/__main__.py @@ -3,8 +3,9 @@ import logging import traceback -from gazpar2haws import __version__, config_utils +from gazpar2haws import __version__ from gazpar2haws.bridge import Bridge +from gazpar2haws.configuration import Configuration Logger = logging.getLogger(__name__) @@ -16,9 +17,7 @@ async def main(): prog="gazpar2haws", description="Gateway that reads data history from the GrDF (French gas provider) meter and send it to Home Assistant using WebSocket interface.", ) - parser.add_argument( - "-v", "--version", action="version", version="Gazpar2HAWS version" - ) + parser.add_argument("-v", "--version", action="version", version="Gazpar2HAWS version") parser.add_argument( "-c", "--config", @@ -38,17 +37,15 @@ async def main(): try: # Load configuration files - config = config_utils.ConfigLoader(args.config, args.secrets) - config.load_secrets() - config.load_config() + config = Configuration.load(args.config, args.secrets) print(f"Gazpar2HAWS version: {__version__}") # Set up logging - logging_file = config.get("logging.file") - logging_console = bool(config.get("logging.console")) - logging_level = config.get("logging.level") - logging_format = config.get("logging.format") + logging_file = config.logging.file + logging_console = config.logging.console + logging_level = config.logging.level + logging_format = config.logging.format # Convert logging level to integer if logging_level.upper() == "DEBUG": @@ -70,9 +67,7 @@ async def main(): # Add a console handler manually console_handler = logging.StreamHandler() console_handler.setLevel(level) # Set logging level for the console - console_handler.setFormatter( - logging.Formatter(logging_format) - ) # Customize console format + console_handler.setFormatter(logging.Formatter(logging_format)) # Customize console format # Get the root logger and add the console handler logging.getLogger().addHandler(console_handler) @@ -91,12 +86,10 @@ async def main(): return 0 except Exception: # pylint: disable=broad-except - errorMessage = ( - f"An error occured while running Gazpar2HAWS: {traceback.format_exc()}" - ) + errorMessage = f"An error occured while running Gazpar2HAWS: {traceback.format_exc()}" Logger.error(errorMessage) print(errorMessage) - return 1 + raise # 
---------------------------------- diff --git a/gazpar2haws/bridge.py b/gazpar2haws/bridge.py index 455f8b3..d6eb9a7 100644 --- a/gazpar2haws/bridge.py +++ b/gazpar2haws/bridge.py @@ -2,7 +2,7 @@ import logging import signal -from gazpar2haws import config_utils +from gazpar2haws.configuration import Configuration from gazpar2haws.gazpar import Gazpar from gazpar2haws.haws import HomeAssistantWS @@ -13,34 +13,22 @@ class Bridge: # ---------------------------------- - def __init__(self, config: config_utils.ConfigLoader): + def __init__(self, config: Configuration): # GrDF scan interval (in seconds) - if config.get("grdf.scan_interval") is None: - raise ValueError("Configuration parameter 'grdf.scan_interval' is missing") - self._grdf_scan_interval = int(config.get("grdf.scan_interval")) + self._grdf_scan_interval = config.grdf.scan_interval # Home Assistant configuration: host - if config.get("homeassistant.host") is None: - raise ValueError("Configuration parameter 'homeassistant.host' is missing") - ha_host = config.get("homeassistant.host") + ha_host = config.homeassistant.host # Home Assistant configuration: port - if config.get("homeassistant.port") is None: - raise ValueError("Configuration parameter 'homeassistant.port' is missing") - ha_port = config.get("homeassistant.port") + ha_port = config.homeassistant.port # Home Assistant configuration: endpoint - ha_endpoint = ( - config.get("homeassistant.endpoint") - if config.get("homeassistant.endpoint") - else "/api/websocket" - ) + ha_endpoint = config.homeassistant.endpoint # Home Assistant configuration: token - if config.get("homeassistant.token") is None: - raise ValueError("Configuration parameter 'homeassistant.token' is missing") - ha_token = config.get("homeassistant.token") + ha_token = config.homeassistant.token.get_secret_value() # Initialize Home Assistant self._homeassistant = HomeAssistantWS(ha_host, ha_port, ha_endpoint, ha_token) @@ -48,10 +36,8 @@ def __init__(self, config: config_utils.ConfigLoader): # Initialize Gazpar self._gazpar = [] - if config.get("grdf.devices") is None: - raise ValueError("Configuration parameter 'grdf.devices' is missing") - for grdf_device_config in config.get("grdf.devices"): - self._gazpar.append(Gazpar(grdf_device_config, self._homeassistant)) + for grdf_device_config in config.grdf.devices: + self._gazpar.append(Gazpar(grdf_device_config, config.pricing, self._homeassistant)) # Set up signal handler signal.signal(signal.SIGINT, self.handle_signal) @@ -85,9 +71,7 @@ async def run(self): for gazpar in self._gazpar: Logger.info(f"Publishing data for device '{gazpar.name()}'...") await gazpar.publish() - Logger.info( - f"Device '{gazpar.name()}' data published to Home Assistant WS." - ) + Logger.info(f"Device '{gazpar.name()}' data published to Home Assistant WS.") Logger.info("Gazpar data published to Home Assistant WS.") @@ -95,9 +79,7 @@ async def run(self): await self._homeassistant.disconnect() # Wait before next scan - Logger.info( - f"Waiting {self._grdf_scan_interval} minutes before next scan..." - ) + Logger.info(f"Waiting {self._grdf_scan_interval} minutes before next scan...") # Check if the scan interval is 0 and leave the loop. 
if self._grdf_scan_interval == 0: diff --git a/gazpar2haws/config_utils.py b/gazpar2haws/config_utils.py index b3031d9..99bc90e 100644 --- a/gazpar2haws/config_utils.py +++ b/gazpar2haws/config_utils.py @@ -29,9 +29,7 @@ def load_config(self): self.raw_config = yaml.safe_load(file) self.config = self._resolve_secrets(self.raw_config) else: - raise FileNotFoundError( - f"Configuration file '{self.config_file}' not found." - ) + raise FileNotFoundError(f"Configuration file '{self.config_file}' not found.") def _resolve_secrets(self, data): """Recursively resolve `!secret` keys in the configuration.""" @@ -57,5 +55,8 @@ def get(self, key, default=None): except (KeyError, TypeError): return default + def dict(self) -> dict: + return self.config + def dumps(self) -> str: return yaml.dump(self.raw_config) diff --git a/gazpar2haws/configuration.py b/gazpar2haws/configuration.py new file mode 100644 index 0000000..594fc28 --- /dev/null +++ b/gazpar2haws/configuration.py @@ -0,0 +1,28 @@ +from typing import Optional + +import yaml +from pydantic import BaseModel + +from gazpar2haws import config_utils +from gazpar2haws.model import Grdf, HomeAssistant, Logging, Pricing + + +class Configuration(BaseModel): + + logging: Logging + grdf: Grdf + homeassistant: HomeAssistant + pricing: Optional[Pricing] = None + + @classmethod + def load(cls, config_file: str, secrets_file: str): + + # Load configuration + config = config_utils.ConfigLoader(config_file, secrets_file) + config.load_secrets() + config.load_config() + + return cls(**config.dict()) + + def dumps(self) -> str: + return yaml.dump(self.model_dump(mode="json"), allow_unicode=True) diff --git a/gazpar2haws/date_array.py b/gazpar2haws/date_array.py new file mode 100644 index 0000000..ab29fd4 --- /dev/null +++ b/gazpar2haws/date_array.py @@ -0,0 +1,236 @@ +from __future__ import annotations + +import datetime as dt +from typing import Optional, overload + +import numpy as np +from pydantic import BaseModel, ConfigDict, model_validator + + +class DateArray(BaseModel): # pylint: disable=too-few-public-methods + model_config = ConfigDict(arbitrary_types_allowed=True) + + start_date: dt.date + end_date: dt.date + array: Optional[np.ndarray] = None + initial_value: Optional[float] = None + + @model_validator(mode="after") + def set_array(self): + if self.array is None: + if self.initial_value is not None: + self.array = np.full((self.end_date - self.start_date).days + 1, self.initial_value) + else: + self.array = np.zeros((self.end_date - self.start_date).days + 1) + return self + + # ---------------------------------- + def get(self, date: dt.date) -> float: + + if self.array is None: + raise ValueError("Array is not initialized") + + return self.array[(date - self.start_date).days] + + # ---------------------------------- + def cumsum(self) -> DateArray: + + if self.array is None: + raise ValueError("Array is not initialized") + + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = np.cumsum(self.array) + return result + + # ---------------------------------- + def is_aligned_with(self, other: DateArray) -> bool: + + return ( + self.start_date == other.start_date and self.end_date == other.end_date and len(self) == len(other) + ) # pylint: disable=protected-access + + # ---------------------------------- + @overload + def __getitem__(self, index: int) -> float: ... + + @overload + def __getitem__(self, date: dt.date) -> float: ... + + @overload + def __getitem__(self, date_slice: slice) -> np.ndarray: ... 
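+    # Usage note (illustrative, not part of the original patch): with
+    # da = DateArray(start_date=dt.date(2025, 1, 1), end_date=dt.date(2025, 1, 31)),
+    # da[dt.date(2025, 1, 5)] returns the value stored for that day, and
+    # da[dt.date(2025, 1, 5):dt.date(2025, 1, 10)] returns a numpy slice whose stop date is inclusive.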
+ + def __getitem__(self, key): + if self.array is None: + raise ValueError("Array is not initialized") + if isinstance(key, int): + return self.array[key] + if isinstance(key, dt.date): + return self.get(key) + if isinstance(key, slice): + start_date: dt.date = key.start # type: ignore + end_date: dt.date = key.stop # type: ignore + start_index: int = (start_date - self.start_date).days + end_index: int = (end_date - self.start_date).days + 1 + return self.array[start_index:end_index] + raise TypeError("Key must be a date or a slice of dates") + + # ---------------------------------- + @overload + def __setitem__(self, index: int, value: float): ... + + @overload + def __setitem__(self, date: dt.date, value: float): ... + + @overload + def __setitem__(self, date_slice: slice, value: float): ... + + def __setitem__(self, key, value: float): + if self.array is None: + raise ValueError("Array is not initialized") + if isinstance(key, int): + self.array[key] = value + elif isinstance(key, dt.date): + self.array[(key - self.start_date).days] = value + elif isinstance(key, slice): + start_date: dt.date = key.start # type: ignore + end_date: dt.date = key.stop # type: ignore + start_index: int = (start_date - self.start_date).days + end_index: int = (end_date - self.start_date).days + 1 + self.array[start_index:end_index] = value + else: + raise TypeError("Key must be a date or a slice of dates") + + # ---------------------------------- + def __len__(self) -> int: + + if self.array is None: + raise ValueError("Array is not initialized") + + return len(self.array) + + # ---------------------------------- + def __iter__(self): + self._index = 0 # pylint: disable=attribute-defined-outside-init + return self + + # ---------------------------------- + def __next__(self): + if self._index < len(self.array): + current_date = self.start_date + dt.timedelta(days=self._index) + result = (current_date, self.array[self._index]) + self._index += 1 + return result + raise StopIteration + + # ---------------------------------- + @overload + def __add__(self, other: DateArray) -> DateArray: ... + + @overload + def __add__(self, other: float) -> DateArray: ... + + def __add__(self, other) -> DateArray: + + if self.array is None: + raise ValueError("Array is not initialized") + + if isinstance(other, (int, float)): + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array + other + return result + if isinstance(other, DateArray): + if other.array is None: + raise ValueError("Array is not initialized") + if not self.is_aligned_with(other): + raise ValueError("Date arrays are not aligned") + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array + other.array # pylint: disable=protected-access + return result + + raise TypeError("Other must be a date array or a number") + + # ---------------------------------- + @overload + def __sub__(self, other: DateArray) -> DateArray: ... + + @overload + def __sub__(self, other: float) -> DateArray: ... 
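+    # Usage note (illustrative): the arithmetic operators (+, -, *, /) accept either a scalar or another
+    # DateArray covering exactly the same date range; misaligned operands raise ValueError (see is_aligned_with).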
+ + def __sub__(self, other) -> DateArray: + + if self.array is None: + raise ValueError("Array is not initialized") + + if isinstance(other, (int, float)): + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array - other + return result + if isinstance(other, DateArray): + if other.array is None: + raise ValueError("Array is not initialized") + if not self.is_aligned_with(other): + raise ValueError("Date arrays are not aligned") + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array - other.array # pylint: disable=protected-access + return result + + raise TypeError("Other must be a date array or a number") + + # ---------------------------------- + @overload + def __mul__(self, other: DateArray) -> DateArray: ... + + @overload + def __mul__(self, other: float) -> DateArray: ... + + def __mul__(self, other) -> DateArray: + + if self.array is None: + raise ValueError("Array is not initialized") + + if isinstance(other, (int, float)): + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array * other + return result + if isinstance(other, DateArray): + if other.array is None: + raise ValueError("Array is not initialized") + if not self.is_aligned_with(other): + raise ValueError("Date arrays are not aligned") + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array * other.array # pylint: disable=protected-access + return result + + raise TypeError("Other must be a date array or a number") + + # ---------------------------------- + @overload + def __truediv__(self, other: DateArray) -> DateArray: ... + + @overload + def __truediv__(self, other: float) -> DateArray: ... + + def __truediv__(self, other) -> DateArray: + + if self.array is None: + raise ValueError("Array is not initialized") + + if isinstance(other, (int, float)): + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array / other + return result + if isinstance(other, DateArray): + if other.array is None: + raise ValueError("Array is not initialized") + if not self.is_aligned_with(other): + raise ValueError("Date arrays are not aligned") + result = DateArray(start_date=self.start_date, end_date=self.end_date) + result.array = self.array / other.array # pylint: disable=protected-access + return result + + raise TypeError("Other must be a date array or a number") + + # ---------------------------------- + def __repr__(self) -> str: + + return f"DateArray(start_date={self.start_date}, end_date={self.end_date}, array={self.array})" diff --git a/gazpar2haws/gazpar.py b/gazpar2haws/gazpar.py index 3cebaa5..934bdef 100644 --- a/gazpar2haws/gazpar.py +++ b/gazpar2haws/gazpar.py @@ -1,12 +1,23 @@ import logging import traceback -from datetime import datetime, timedelta -from typing import Any +from datetime import date, datetime, timedelta +from typing import Optional import pygazpar # type: ignore import pytz +from pygazpar.datasource import MeterReadings # type: ignore +from gazpar2haws.date_array import DateArray from gazpar2haws.haws import HomeAssistantWS, HomeAssistantWSException +from gazpar2haws.model import ( + ConsumptionQuantityArray, + Device, + PriceUnit, + Pricing, + QuantityUnit, + TimeUnit, +) +from gazpar2haws.pricer import Pricer Logger = logging.getLogger(__name__) @@ -15,80 +26,51 @@ class Gazpar: # ---------------------------------- - def __init__(self, config: dict[str, Any], homeassistant: 
HomeAssistantWS): + def __init__( + self, + device_config: Device, + pricing_config: Optional[Pricing], + homeassistant: HomeAssistantWS, + ): self._homeassistant = homeassistant + self._grdf_config = device_config + self._pricing_config = pricing_config # GrDF configuration: name - if config.get("name") is None: - raise ValueError("Configuration parameter 'grdf.devices[].name' is missing") - self._name = config.get("name") + self._name = device_config.name # GrDF configuration: data source - self._data_source = ( - config.get("data_source") if config.get("data_source") else "json" - ) + self._data_source = device_config.data_source # GrDF configuration: username - if self._data_source != "test" and config.get("username") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].username' is missing" - ) - self._username = config.get("username") + self._username = device_config.username # GrDF configuration: password - if self._data_source != "test" and config.get("password") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].password' is missing" - ) - self._password = config.get("password") + self._password = device_config.password.get_secret_value() if device_config.password is not None else None # GrDF configuration: pce_identifier - if self._data_source != "test" and config.get("pce_identifier") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].pce_identifier' is missing" - ) - self._pce_identifier = str(config.get("pce_identifier")) + self._pce_identifier = ( + device_config.pce_identifier.get_secret_value() if device_config.pce_identifier is not None else None + ) # GrDF configuration: tmp_dir - self._tmp_dir = config.get("tmp_dir") if config.get("tmp_dir") else "/tmp" + self._tmp_dir = device_config.tmp_dir # GrDF configuration: last_days - if config.get("last_days") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].last_days' is missing" - ) - self._last_days = int(str(config.get("last_days"))) + self._last_days = device_config.last_days # GrDF configuration: timezone - if config.get("timezone") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].timezone' is missing" - ) - self._timezone = str(config.get("timezone")) + self._timezone = device_config.timezone # GrDF configuration: reset - if config.get("reset") is None: - raise ValueError( - "Configuration parameter 'grdf.devices[].reset' is missing" - ) - self._reset = bool(config.get("reset")) + self._reset = device_config.reset # As of date: YYYY-MM-DD - as_of_date = config.get("as_of_date") - if self._data_source is not None and str(self._data_source).lower() == "test": - self._as_of_date = ( - datetime.now(tz=pytz.timezone(self._timezone)) - if as_of_date is None - else datetime.strptime(as_of_date, "%Y-%m-%d") - ) - else: - self._as_of_date = datetime.now(tz=pytz.timezone(self._timezone)) + self._as_of_date = device_config.as_of_date # Set the timezone - timezone = pytz.timezone(self._timezone) - if self._as_of_date.tzinfo is None: - self._as_of_date = timezone.localize(self._as_of_date) + self._timezone = device_config.timezone # ---------------------------------- def name(self): @@ -98,41 +80,107 @@ def name(self): # Publish Gaspar data to Home Assistant WS async def publish(self): - # Volume and energy sensor names. + # Volume, energy and cost sensor names. 
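+        # For the default device name "gazpar2haws", these resolve to sensor.gazpar2haws_volume,
+        # sensor.gazpar2haws_energy and sensor.gazpar2haws_cost.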
volume_sensor_name = f"sensor.{self._name}_volume" energy_sensor_name = f"sensor.{self._name}_energy" + cost_sensor_name = f"sensor.{self._name}_cost" # Eventually reset the sensor in Home Assistant if self._reset: try: - await self._homeassistant.clear_statistics( - [volume_sensor_name, energy_sensor_name] - ) + await self._homeassistant.clear_statistics([volume_sensor_name, energy_sensor_name]) except Exception: - Logger.warning( - f"Error while resetting the sensor in Home Assistant: {traceback.format_exc()}" - ) + Logger.warning(f"Error while resetting the sensor in Home Assistant: {traceback.format_exc()}") raise - # Publish volume sensor - await self._publish_entity( - volume_sensor_name, pygazpar.PropertyName.VOLUME.value, "m³" - ) - await self._publish_entity( - energy_sensor_name, pygazpar.PropertyName.ENERGY.value, "kWh" + last_date_and_value_by_sensor = dict[str, tuple[date, float]]() + + last_date_and_value_by_sensor[volume_sensor_name] = await self.find_last_date_and_value(volume_sensor_name) + last_date_and_value_by_sensor[energy_sensor_name] = await self.find_last_date_and_value(energy_sensor_name) + last_date_and_value_by_sensor[cost_sensor_name] = await self.find_last_date_and_value(cost_sensor_name) + + # Compute the start date as the minimum of the last dates plus one day + start_date = min(v[0] for v in last_date_and_value_by_sensor.values()) + timedelta(days=1) + + # The end date is the as of date + end_date = self._as_of_date + + # Fetch the data from GrDF and publish it to Home Assistant + daily_history = self.fetch_daily_gazpar_history(start_date, end_date) + + # Extract the volume from the daily history + volume_array = self.extract_property_from_daily_gazpar_history( + daily_history, + pygazpar.PropertyName.VOLUME.value, + last_date_and_value_by_sensor[volume_sensor_name][0], + end_date, ) - # ---------------------------------- - # Publish a sensor to Home Assistant - async def _publish_entity( - self, entity_id: str, property_name: str, unit_of_measurement: str - ): + # Extract the energy from the daily history + energy_array = self.extract_property_from_daily_gazpar_history( + daily_history, + pygazpar.PropertyName.ENERGY.value, + last_date_and_value_by_sensor[energy_sensor_name][0], + end_date, + ) - # Find last date, days and value of the entity. 
- last_date, last_days, last_value = await self._find_last_date_days_value( - entity_id + # Publish the volume and energy to Home Assistant + if volume_array is not None: + await self.publish_date_array( + volume_sensor_name, + "m³", + volume_array, + last_date_and_value_by_sensor[volume_sensor_name][1], + ) + else: + Logger.info("No volume data to publish") + + if energy_array is not None: + await self.publish_date_array( + energy_sensor_name, + "kWh", + energy_array, + last_date_and_value_by_sensor[energy_sensor_name][1], + ) + else: + Logger.info("No energy data to publish") + + if self._pricing_config is None: + Logger.info("No pricing configuration provided") + return + + # Compute the cost from the energy + quantities = ConsumptionQuantityArray( + start_date=last_date_and_value_by_sensor[energy_sensor_name][0], + end_date=end_date, + value_unit=QuantityUnit.KWH, + base_unit=TimeUnit.DAY, + value_array=energy_array, ) + # Compute the cost + if energy_array is not None: + pricer = Pricer(self._pricing_config) + + cost_array = pricer.compute(quantities, PriceUnit.EURO) + else: + cost_array = None + + # Publish the cost to Home Assistant + if cost_array is not None: + await self.publish_date_array( + cost_sensor_name, + cost_array.value_unit, + cost_array.value_array, + last_date_and_value_by_sensor[cost_sensor_name][1], + ) + else: + Logger.info("No cost data to publish") + + # ---------------------------------- + # Fetch daily Gazpar history. + def fetch_daily_gazpar_history(self, start_date: date, end_date: date) -> MeterReadings: + # Instantiate the right data source. data_source = self._create_data_source() @@ -140,48 +188,77 @@ async def _publish_entity( client = pygazpar.Client(data_source) try: - data = client.loadSince( + history = client.loadDateRange( pceIdentifier=self._pce_identifier, - lastNDays=last_days, + startDate=start_date, + endDate=end_date, frequencies=[pygazpar.Frequency.DAILY], ) + res = history[pygazpar.Frequency.DAILY.value] except Exception: # pylint: disable=broad-except - Logger.warning( - f"Error while fetching data from GrDF: {traceback.format_exc()}" - ) - data = {} + Logger.warning(f"Error while fetching data from GrDF: {traceback.format_exc()}") + res = MeterReadings() - # Timezone - timezone = pytz.timezone(self._timezone) + return res - # Compute and fill statistics. - daily = data.get(pygazpar.Frequency.DAILY.value) - statistics = [] - total = last_value - for reading in daily: + # ---------------------------------- + # Extract a given property from the daily Gazpar history and return a DateArray. + def extract_property_from_daily_gazpar_history( + self, + readings: MeterReadings, + property_name: str, + start_date: date, + end_date: date, + ) -> Optional[DateArray]: + + # Fill the quantity array. + res: Optional[DateArray] = None + + for reading in readings: # Parse date format DD/MM/YYYY into datetime. - date = datetime.strptime( - reading[pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y" - ) + reading_date = datetime.strptime(reading[pygazpar.PropertyName.TIME_PERIOD.value], "%d/%m/%Y").date() - # Set the timezone - date = timezone.localize(date) + # Skip all readings before the start date. + if reading_date < start_date: + # Logger.debug(f"Skip date: {reading_date} < {start_date}") + continue - # Skip all readings before the last statistic date. - if date <= last_date: - Logger.debug(f"Skip date: {date} <= {last_date}") + # Skip all readings after the end date. 
+ if reading_date > end_date: + # Logger.debug(f"Skip date: {reading_date} > {end_date}") continue - # Compute the total volume and energy + # Fill the quantity array. if reading[property_name] is not None: - total += reading[property_name] - else: - Logger.warning( - f"Missing property {property_name} for date {date}. Skipping..." - ) - continue + if res is None: + res = DateArray(start_date=start_date, end_date=end_date) + res[reading_date] = reading[property_name] + + return res + + # ---------------------------------- + # Push a date array to Home Assistant. + async def publish_date_array( + self, + entity_id: str, + unit_of_measurement: str, + date_array: DateArray, + initial_value: float, + ): - statistics.append({"start": date.isoformat(), "state": total, "sum": total}) + # Compute the cumulative sum of the values. + total_array = date_array.cumsum() + initial_value + + # Timezone + timezone = pytz.timezone(self._timezone) + + # Fill the statistics. + statistics = [] + for dt, total in total_array: + # Set the timezone + date_time = datetime.combine(dt, datetime.min.time()) + date_time = timezone.localize(date_time) + statistics.append({"start": date_time.isoformat(), "state": total, "sum": total}) # Publish statistics to Home Assistant try: @@ -189,9 +266,7 @@ async def _publish_entity( entity_id, "recorder", "gazpar2haws", unit_of_measurement, statistics ) except Exception: - Logger.warning( - f"Error while importing statistics to Home Assistant: {traceback.format_exc()}" - ) + Logger.warning(f"Error while importing statistics to Home Assistant: {traceback.format_exc()}") raise # ---------------------------------- @@ -209,36 +284,31 @@ def _create_data_source(self) -> pygazpar.datasource.IDataSource: tmpDirectory=self._tmp_dir, ) - return pygazpar.JsonWebDataSource( - username=self._username, password=self._password - ) + return pygazpar.JsonWebDataSource(username=self._username, password=self._password) # ---------------------------------- - # Find last date, days and value of the entity. - async def _find_last_date_days_value( - self, entity_id: str - ) -> tuple[datetime, int, float]: + # Find last date, value of the entity. 
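+ # Returns a (last_date, last_value) pair: the date of the most recent statistic stored
+ # in Home Assistant for the given entity and its cumulative sum, or, when the entity has
+ # no statistics yet, (as_of_date - last_days) and 0.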
+ async def find_last_date_and_value(self, entity_id: str) -> tuple[date, float]: # Check the existence of the sensor in Home Assistant try: - exists_statistic_id = await self._homeassistant.exists_statistic_id( - entity_id, "sum" - ) + exists_statistic_id = await self._homeassistant.exists_statistic_id(entity_id, "sum") except Exception: Logger.warning( - f"Error while checking the existence of the sensor in Home Assistant: {traceback.format_exc()}" + f"Error while checking the existence of the entity '{entity_id}' in Home Assistant: {traceback.format_exc()}" ) raise if exists_statistic_id: # Get the last statistic from Home Assistant try: - last_statistic = await self._homeassistant.get_last_statistic( - entity_id, self._as_of_date, self._last_days - ) + as_of_date = datetime.combine(self._as_of_date, datetime.min.time()) + as_of_date = pytz.timezone(self._timezone).localize(as_of_date) + + last_statistic = await self._homeassistant.get_last_statistic(entity_id, as_of_date, self._last_days) except HomeAssistantWSException: Logger.warning( - f"Error while fetching last statistics from Home Assistant: {traceback.format_exc()}" + f"Error while fetching last statistics of the entity '{entity_id}' from Home Assistant: {traceback.format_exc()}" ) if last_statistic: @@ -246,35 +316,25 @@ async def _find_last_date_days_value( last_date = datetime.fromtimestamp( int(str(last_statistic.get("start"))) / 1000, tz=pytz.timezone(self._timezone), - ) - - # Compute the number of days since the last statistics - last_days = (self._as_of_date - last_date).days + ).date() # Get the last meter value last_value = float(str(last_statistic.get("sum"))) - Logger.debug( - f"Last date: {last_date}, last days: {last_days}, last value: {last_value}" - ) + Logger.debug(f"Entity '{entity_id}' => Last date: {last_date}, last value: {last_value}") - return last_date, last_days, last_value + return last_date, last_value - Logger.debug(f"No statistics found for the existing sensor {entity_id}.") + Logger.debug(f"Entity '{entity_id}' => No statistics found.") else: - Logger.debug(f"Sensor {entity_id} does not exist in Home Assistant.") - - # If the sensor does not exist in Home Assistant, fetch the last days defined in the configuration - last_days = self._last_days + Logger.debug(f"Entity '{entity_id}' does not exist in Home Assistant.") # Compute the corresponding last_date - last_date = self._as_of_date - timedelta(days=last_days) + last_date = self._as_of_date - timedelta(days=self._last_days) # If no statistic, the last value is initialized to zero last_value = 0 - Logger.debug( - f"Last date: {last_date}, last days: {last_days}, last value: {last_value}" - ) + Logger.debug(f"Entity '{entity_id}' => Last date: {last_date}, last value: {last_value}") - return last_date, last_days, last_value + return last_date, last_value diff --git a/gazpar2haws/haws.py b/gazpar2haws/haws.py index b5910cb..17785bb 100644 --- a/gazpar2haws/haws.py +++ b/gazpar2haws/haws.py @@ -15,7 +15,7 @@ class HomeAssistantWSException(Exception): # ---------------------------------- class HomeAssistantWS: # ---------------------------------- - def __init__(self, host: str, port: str, endpoint: str, token: str): + def __init__(self, host: str, port: int, endpoint: str, token: str): self._host = host self._port = port self._endpoint = endpoint @@ -92,9 +92,7 @@ async def send_message(self, message: dict) -> dict | list[dict]: raise HomeAssistantWSException(f"Invalid response message: {response_data}") if not response_data.get("success"): - raise 
HomeAssistantWSException( - f"Request failed: {response_data.get('error')}" - ) + raise HomeAssistantWSException(f"Request failed: {response_data.get('error')}") return response_data.get("result") @@ -122,17 +120,13 @@ async def list_statistic_ids(self, statistic_type: str | None = None) -> list[di return response # ---------------------------------- - async def exists_statistic_id( - self, entity_id: str, statistic_type: str | None = None - ) -> bool: + async def exists_statistic_id(self, entity_id: str, statistic_type: str | None = None) -> bool: Logger.debug(f"Checking if {entity_id} exists...") statistic_ids = await self.list_statistic_ids(statistic_type) - entity_ids = [ - statistic_id.get("statistic_id") for statistic_id in statistic_ids - ] + entity_ids = [statistic_id.get("statistic_id") for statistic_id in statistic_ids] exists_statistic = entity_id in entity_ids @@ -141,13 +135,9 @@ async def exists_statistic_id( return exists_statistic # ---------------------------------- - async def statistics_during_period( - self, entity_ids: list[str], start_time: datetime, end_time: datetime - ) -> dict: + async def statistics_during_period(self, entity_ids: list[str], start_time: datetime, end_time: datetime) -> dict: - Logger.debug( - f"Getting {entity_ids} statistics during period from {start_time} to {end_time}..." - ) + Logger.debug(f"Getting {entity_ids} statistics during period from {start_time} to {end_time}...") # Subscribe to statistics statistics_message = { @@ -166,16 +156,12 @@ async def statistics_during_period( f"Invalid statistics_during_period response type: got {type(response)} instead of dict" ) - Logger.debug( - f"Received {entity_ids} statistics during period from {start_time} to {end_time}" - ) + Logger.debug(f"Received {entity_ids} statistics during period from {start_time} to {end_time}") return response # ---------------------------------- - async def get_last_statistic( - self, entity_id: str, as_of_date: datetime, depth_days: int - ) -> dict: + async def get_last_statistic(self, entity_id: str, as_of_date: datetime, depth_days: int) -> dict: Logger.debug(f"Getting last statistic for {entity_id}...") @@ -201,9 +187,7 @@ async def import_statistics( statistics: list[dict], ): - Logger.debug( - f"Importing {len(statistics)} statistics for {entity_id} from {source}..." 
-        )
+        Logger.debug(f"Importing {len(statistics)} statistics for {entity_id} from {source}...")
 if len(statistics) == 0:
     Logger.debug("No statistics to import")
@@ -225,9 +209,7 @@ async def import_statistics(
 await self.send_message(import_statistics_message)
-        Logger.debug(
-            f"Imported {len(statistics)} statistics for {entity_id} from {source}"
-        )
+        Logger.debug(f"Imported {len(statistics)} statistics for {entity_id} from {source}")
 # ----------------------------------
 async def clear_statistics(self, entity_ids: list[str]):
diff --git a/gazpar2haws/model.py b/gazpar2haws/model.py
new file mode 100644
index 0000000..df9721c
--- /dev/null
+++ b/gazpar2haws/model.py
@@ -0,0 +1,234 @@
+from datetime import date
+from enum import Enum
+from pathlib import Path
+from typing import Generic, Optional, TypeVar
+
+from pydantic import BaseModel, DirectoryPath, EmailStr, SecretStr, model_validator
+from pydantic_extra_types.timezone_name import TimeZoneName
+
+from gazpar2haws.date_array import DateArray
+
+
+# ----------------------------------
+class LoggingLevel(str, Enum):
+    DEBUG = "debug"
+    INFO = "info"
+    WARNING = "warning"
+    ERROR = "error"
+    CRITICAL = "critical"
+
+
+# ----------------------------------
+class TimeUnit(str, Enum):
+    DAY = "day"
+    WEEK = "week"
+    MONTH = "month"
+    YEAR = "year"
+
+
+# ----------------------------------
+class PriceUnit(str, Enum):
+    EURO = "€"
+    CENT = "¢"
+
+
+# ----------------------------------
+class QuantityUnit(str, Enum):
+    MWH = "MWh"
+    KWH = "kWh"
+    WH = "Wh"
+    M3 = "m³"
+    LITER = "l"
+
+
+# ----------------------------------
+class Logging(BaseModel):
+    file: str
+    console: bool
+    level: LoggingLevel
+    format: str
+
+
+# ----------------------------------
+class Device(BaseModel):
+    name: str
+    data_source: str = "json"
+    tmp_dir: DirectoryPath = DirectoryPath("/tmp")
+    as_of_date: date = date.today()
+    username: Optional[EmailStr] = None
+    password: Optional[SecretStr] = None
+    pce_identifier: Optional[SecretStr] = None
+    timezone: TimeZoneName = TimeZoneName("Europe/Paris")
+    last_days: int = 365
+    reset: bool = False
+
+    @model_validator(mode="after")
+    def validate_properties(self):
+        if self.data_source not in ["json", "excel", "test"]:
+            raise ValueError(f"Invalid data_source {self.data_source} (expected values: json, excel, test)")
+        if self.data_source != "test" and self.username is None:
+            raise ValueError("Missing username")
+        if self.data_source != "test" and self.password is None:
+            raise ValueError("Missing password")
+        if self.data_source != "test" and self.pce_identifier is None:
+            raise ValueError("Missing pce_identifier")
+        if self.data_source == "excel" and (self.tmp_dir is None or not Path(self.tmp_dir).is_dir()):
+            raise ValueError(f"Invalid tmp_dir {self.tmp_dir}")
+        return self
+
+
+# ----------------------------------
+class Grdf(BaseModel):
+    scan_interval: Optional[int] = 480
+    devices: list[Device]
+
+
+# ----------------------------------
+class HomeAssistant(BaseModel):
+    host: str
+    port: int
+    endpoint: str = "/api/websocket"
+    token: SecretStr
+
+
+# ----------------------------------
+class Period(BaseModel):
+    start_date: date
+    end_date: Optional[date] = None
+
+
+# ----------------------------------
+class Value(Period):
+    value: float
+
+
+# ----------------------------------
+class ValueArray(Period):
+    value_array: Optional[DateArray] = None
+
+    @model_validator(mode="after")
+    def set_value_array(self):
+        if self.value_array is None:
+            self.value_array = DateArray(
+                start_date=self.start_date,
end_date=self.end_date + ) # pylint: disable=attribute-defined-outside-init + return self + + +# ---------------------------------- +class Vat(BaseModel): + id: str + + +# ---------------------------------- +class VatRate(Vat, Value): + pass + + +# ---------------------------------- +class VatRateArray(Vat, ValueArray): + pass + + +# ---------------------------------- +# Define type variables +ValueUnit = TypeVar("ValueUnit") +BaseUnit = TypeVar("BaseUnit") + + +# ---------------------------------- +class Unit(BaseModel, Generic[ValueUnit, BaseUnit]): + value_unit: Optional[ValueUnit] = None + base_unit: Optional[BaseUnit] = None + + +# ---------------------------------- +class Price(Unit[ValueUnit, BaseUnit]): # pylint: disable=too-few-public-methods + vat_id: Optional[str] = None + + +# ---------------------------------- +class PriceValue(Price[ValueUnit, BaseUnit], Value): + pass + + +# ---------------------------------- +class PriceValueArray(Price[ValueUnit, BaseUnit], ValueArray): + pass + + +# ---------------------------------- +class ConsumptionPriceArray(PriceValueArray[PriceUnit, QuantityUnit]): # pylint: disable=too-few-public-methods + pass + + +# ---------------------------------- +class SubscriptionPriceArray(PriceValueArray[PriceUnit, TimeUnit]): # pylint: disable=too-few-public-methods + pass + + +# ---------------------------------- +class TransportPriceArray(PriceValueArray[PriceUnit, TimeUnit]): # pylint: disable=too-few-public-methods + pass + + +# ---------------------------------- +class EnergyTaxesPriceArray(PriceValueArray[PriceUnit, QuantityUnit]): # pylint: disable=too-few-public-methods + pass + + +# ---------------------------------- +class Pricing(BaseModel): + vat: Optional[list[VatRate]] = None + consumption_prices: list[PriceValue[PriceUnit, QuantityUnit]] + subscription_prices: Optional[list[PriceValue[PriceUnit, TimeUnit]]] = None + transport_prices: Optional[list[PriceValue[PriceUnit, TimeUnit]]] = None + energy_taxes: Optional[list[PriceValue[PriceUnit, QuantityUnit]]] = None + + @model_validator(mode="before") + @classmethod + def propagates_properties(cls, values): + for price_list in [ + "consumption_prices", + "subscription_prices", + "transport_prices", + "energy_taxes", + ]: + prices = values.get(price_list, []) + + if len(prices) == 0: + continue + + if "start_date" not in prices[0]: + raise ValueError(f"Missing start_date in first element of {price_list}") + if "value_unit" not in prices[0]: + prices[0]["value_unit"] = "€" + if "base_unit" not in prices[0]: + if price_list in ["consumption_prices", "energy_taxes"]: + prices[0]["base_unit"] = "kWh" + else: + raise ValueError( + "Missing base_unit in first element of ['transport_prices', 'subscription_prices']" + ) + + for i in range(len(prices) - 1): + if "end_date" not in prices[i]: + prices[i]["end_date"] = prices[i + 1]["start_date"] + if "value_unit" not in prices[i + 1]: + prices[i + 1]["value_unit"] = prices[i]["value_unit"] + if "base_unit" not in prices[i + 1]: + prices[i + 1]["base_unit"] = prices[i]["base_unit"] + if "vat_id" not in prices[i + 1] and "vat_id" in prices[i]: + prices[i + 1]["vat_id"] = prices[i]["vat_id"] + + return values + + +# ---------------------------------- +class ConsumptionQuantityArray(Unit[QuantityUnit, TimeUnit], ValueArray): + pass + + +# ---------------------------------- +class CostArray(Unit[PriceUnit, TimeUnit], ValueArray): + pass diff --git a/gazpar2haws/pricer.py b/gazpar2haws/pricer.py new file mode 100644 index 0000000..42b887a --- /dev/null +++ 
b/gazpar2haws/pricer.py @@ -0,0 +1,571 @@ +import calendar +from datetime import date, timedelta +from typing import Optional, Tuple, overload + +from gazpar2haws.model import ( + BaseUnit, + ConsumptionPriceArray, + ConsumptionQuantityArray, + CostArray, + EnergyTaxesPriceArray, + PriceUnit, + PriceValue, + Pricing, + QuantityUnit, + SubscriptionPriceArray, + TimeUnit, + TransportPriceArray, + Value, + ValueArray, + ValueUnit, + VatRate, + VatRateArray, +) + + +class Pricer: + + # ---------------------------------- + def __init__(self, pricing: Pricing): + self._pricing = pricing + + # ---------------------------------- + def pricing_data(self) -> Pricing: + return self._pricing + + # ---------------------------------- + def compute( # pylint: disable=too-many-branches + self, quantities: ConsumptionQuantityArray, price_unit: PriceUnit + ) -> CostArray: + + if quantities is None: + raise ValueError("quantities is None") + + if quantities.start_date is None: + raise ValueError("quantities.start_date is None") + + start_date = quantities.start_date + + if quantities.end_date is None: + raise ValueError("quantities.end_date is None") + + end_date = quantities.end_date + + if quantities.value_array is None: + raise ValueError("quantities.value_array is None") + + if quantities.value_unit is None: + raise ValueError("quantities.value_unit is None") + + if quantities.base_unit is None: + raise ValueError("quantities.base_unit is None") + + quantity_array = quantities.value_array + + # Convert all pricing data to the same unit as the quantities. + consumption_prices = Pricer.convert(self._pricing.consumption_prices, (price_unit, quantities.value_unit)) + + if self._pricing.subscription_prices is not None and len(self._pricing.subscription_prices) > 0: + subscription_prices = Pricer.convert(self._pricing.subscription_prices, (price_unit, quantities.base_unit)) + else: + subscription_prices = None + + if self._pricing.transport_prices is not None and len(self._pricing.transport_prices) > 0: + transport_prices = Pricer.convert(self._pricing.transport_prices, (price_unit, quantities.base_unit)) + else: + transport_prices = None + + if self._pricing.energy_taxes is not None and len(self._pricing.energy_taxes) > 0: + energy_taxes = Pricer.convert(self._pricing.energy_taxes, (price_unit, quantities.value_unit)) + else: + energy_taxes = None + + # Transform to the vectorized form. + if self._pricing.vat is not None and len(self._pricing.vat) > 0: + vat_rate_array_by_id = self.get_vat_rate_array_by_id( + start_date=start_date, end_date=end_date, vat_rates=self._pricing.vat + ) + else: + vat_rate_array_by_id = dict[str, VatRateArray]() + + consumption_price_array = self.get_consumption_price_array( + start_date=start_date, + end_date=end_date, + consumption_prices=consumption_prices, + vat_rate_array_by_id=vat_rate_array_by_id, + ) + + # Subscription price is optional. + if subscription_prices is not None and len(subscription_prices) > 0: + subscription_price_array = self.get_subscription_price_array( + start_date=start_date, + end_date=end_date, + subscription_prices=subscription_prices, + vat_rate_array_by_id=vat_rate_array_by_id, + ) + else: + subscription_price_array = SubscriptionPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=price_unit, + base_unit=quantities.base_unit, + ) + + # Transport price is optional. 
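+ # When no transport price is configured, a default TransportPriceArray covering the same
+ # period is used so that the cost formula below can still add its value_array term.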
+ if transport_prices is not None and len(transport_prices) > 0: + transport_price_array = self.get_transport_price_array( + start_date=start_date, + end_date=end_date, + transport_prices=transport_prices, + vat_rate_array_by_id=vat_rate_array_by_id, + ) + else: + transport_price_array = TransportPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=price_unit, + base_unit=quantities.base_unit, + ) + + # Energy taxes are optional. + if energy_taxes is not None and len(energy_taxes) > 0: + energy_taxes_price_array = self.get_energy_taxes_price_array( + start_date=start_date, + end_date=end_date, + energy_taxes_prices=energy_taxes, + vat_rate_array_by_id=vat_rate_array_by_id, + ) + else: + energy_taxes_price_array = EnergyTaxesPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=price_unit, + base_unit=quantities.value_unit, + ) + + res = CostArray( + start_date=start_date, + end_date=end_date, + value_unit=price_unit, + base_unit=quantities.base_unit, + ) + + # Compute pricing formula + res.value_array = quantity_array * (consumption_price_array.value_array + energy_taxes_price_array.value_array) + subscription_price_array.value_array + transport_price_array.value_array # type: ignore + + return res + + # ---------------------------------- + @classmethod + def get_vat_rate_array_by_id( + cls, start_date: date, end_date: date, vat_rates: list[VatRate] + ) -> dict[str, VatRateArray]: + + if vat_rates is None or len(vat_rates) == 0: + raise ValueError("vat_rates is None or empty") + + res = dict[str, VatRateArray]() + vat_rate_by_id = dict[str, list[VatRate]]() + for vat_rate in vat_rates: + res[vat_rate.id] = VatRateArray(id=vat_rate.id, start_date=start_date, end_date=end_date) + if vat_rate.id not in vat_rate_by_id: + vat_rate_by_id[vat_rate.id] = list[VatRate]() + vat_rate_by_id[vat_rate.id].append(vat_rate) + + for vat_id, vat_rate_list in vat_rate_by_id.items(): + cls._fill_value_array(res[vat_id], vat_rate_list) # type: ignore + + return res + + # ---------------------------------- + @classmethod + def get_consumption_price_array( + cls, + start_date: date, + end_date: date, + consumption_prices: list[PriceValue[PriceUnit, QuantityUnit]], + vat_rate_array_by_id: dict[str, VatRateArray], + ) -> ConsumptionPriceArray: + + if consumption_prices is None or len(consumption_prices) == 0: + raise ValueError("consumption_prices is None or empty") + + first_consumption_price = consumption_prices[0] + + res = ConsumptionPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=first_consumption_price.value_unit, + base_unit=first_consumption_price.base_unit, + vat_id=first_consumption_price.vat_id, + ) + + cls._fill_price_array(res, consumption_prices, vat_rate_array_by_id) # type: ignore + + return res + + # ---------------------------------- + @classmethod + def get_subscription_price_array( + cls, + start_date: date, + end_date: date, + subscription_prices: list[PriceValue[PriceUnit, TimeUnit]], + vat_rate_array_by_id: dict[str, VatRateArray], + ) -> SubscriptionPriceArray: + + if subscription_prices is None or len(subscription_prices) == 0: + raise ValueError("subscription_prices is None or empty") + + first_subscription_price = subscription_prices[0] + + res = SubscriptionPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=first_subscription_price.value_unit, + base_unit=first_subscription_price.base_unit, + vat_id=first_subscription_price.vat_id, + ) + + cls._fill_price_array(res, subscription_prices, vat_rate_array_by_id) # 
type: ignore + + return res + + # ---------------------------------- + @classmethod + def get_transport_price_array( + cls, + start_date: date, + end_date: date, + transport_prices: list[PriceValue[PriceUnit, TimeUnit]], + vat_rate_array_by_id: dict[str, VatRateArray], + ) -> TransportPriceArray: + + if transport_prices is None or len(transport_prices) == 0: + raise ValueError("transport_prices is None or empty") + + first_transport_price = transport_prices[0] + + res = TransportPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=first_transport_price.value_unit, + base_unit=first_transport_price.base_unit, + vat_id=first_transport_price.vat_id, + ) + + cls._fill_price_array(res, transport_prices, vat_rate_array_by_id) # type: ignore + + return res + + # ---------------------------------- + @classmethod + def get_energy_taxes_price_array( + cls, + start_date: date, + end_date: date, + energy_taxes_prices: list[PriceValue[PriceUnit, QuantityUnit]], + vat_rate_array_by_id: dict[str, VatRateArray], + ) -> EnergyTaxesPriceArray: + + if energy_taxes_prices is None or len(energy_taxes_prices) == 0: + raise ValueError("energy_taxes_prices is None or empty") + + first_energy_taxes_price = energy_taxes_prices[0] + + res = EnergyTaxesPriceArray( + start_date=start_date, + end_date=end_date, + value_unit=first_energy_taxes_price.value_unit, + base_unit=first_energy_taxes_price.base_unit, + vat_id=first_energy_taxes_price.vat_id, + ) + + cls._fill_price_array(res, energy_taxes_prices, vat_rate_array_by_id) # type: ignore + + return res + + # ---------------------------------- + @classmethod + def _fill_value_array(cls, out_value_array: ValueArray, in_values: list[Value]) -> None: + + if out_value_array is None: + raise ValueError("out_value_array is None") + + if out_value_array.start_date is None: + raise ValueError("out_value_array.start_date is None") + + start_date = out_value_array.start_date + + if out_value_array.end_date is None: + raise ValueError("out_value_array.end_date is None") + + end_date = out_value_array.end_date + + if out_value_array.value_array is None: + raise ValueError("out_value_array.value_array is None") + + value_array = out_value_array.value_array + + if in_values is None or len(in_values) == 0: + raise ValueError("in_values is None or empty") + + first_value = in_values[0] + last_value = in_values[-1] + + if first_value.start_date > end_date: + # Fully before first value period. + value_array[start_date:end_date] = first_value.value # type: ignore + elif last_value.end_date is not None and last_value.end_date < start_date: + # Fully after last value period. + value_array[start_date:end_date] = last_value.value # type: ignore + else: + if start_date < first_value.start_date: + # Partially before first value period. + value_array[start_date : first_value.start_date] = first_value.value # type: ignore + if last_value.end_date is not None and end_date > last_value.end_date: + # Partially after last value period. + value_array[last_value.end_date : end_date] = last_value.value # type: ignore + # Inside value periods. 
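+ # For each configured period, every day in the overlap between
+ # [value.start_date, value.end_date] and [start_date, end_date] receives that period's value.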
+ for value in in_values: + latest_start = max(value.start_date, start_date) + earliest_end = min(value.end_date if value.end_date is not None else end_date, end_date) + current_date = latest_start + while current_date <= earliest_end: + value_array[current_date] = value.value + current_date += timedelta(days=1) + + # ---------------------------------- + @classmethod + def _fill_price_array( # pylint: disable=too-many-branches + cls, + out_value_array: ValueArray, + in_values: list[PriceValue], + vat_rate_array_by_id: dict[str, VatRateArray], + ) -> None: + + if out_value_array is None: + raise ValueError("out_value_array is None") + + if out_value_array.start_date is None: + raise ValueError("out_value_array.start_date is None") + + start_date = out_value_array.start_date + + if out_value_array.end_date is None: + raise ValueError("out_value_array.end_date is None") + + end_date = out_value_array.end_date + + if out_value_array.value_array is None: + raise ValueError("out_value_array.value_array is None") + + value_array = out_value_array.value_array + + if in_values is None or len(in_values) == 0: + raise ValueError("in_values is None or empty") + + first_value = in_values[0] + last_value = in_values[-1] + + if first_value.start_date > end_date: + # Fully before first value period. + if vat_rate_array_by_id is not None and first_value.vat_id in vat_rate_array_by_id: + vat_value = vat_rate_array_by_id[first_value.vat_id].value_array[start_date:end_date] # type: ignore + else: + vat_value = 0.0 + value_array[start_date:end_date] = first_value.value * (1 + vat_value) # type: ignore + elif last_value.end_date is not None and last_value.end_date < start_date: + # Fully after last value period. + if vat_rate_array_by_id is not None and last_value.vat_id in vat_rate_array_by_id: + vat_value = vat_rate_array_by_id[last_value.vat_id].value_array[start_date:end_date] # type: ignore + else: + vat_value = 0.0 + value_array[start_date:end_date] = last_value.value * (1 + vat_value) # type: ignore + else: + if start_date < first_value.start_date: + # Partially before first value period. + if vat_rate_array_by_id is not None and first_value.vat_id in vat_rate_array_by_id: + vat_value = vat_rate_array_by_id[first_value.vat_id].value_array[start_date : first_value.start_date] # type: ignore + else: + vat_value = 0.0 + value_array[start_date : first_value.start_date] = first_value.value * (1 + vat_value) # type: ignore + if last_value.end_date is not None and end_date > last_value.end_date: + # Partially after last value period. + if vat_rate_array_by_id is not None and last_value.vat_id in vat_rate_array_by_id: + vat_value = vat_rate_array_by_id[last_value.vat_id].value_array[last_value.end_date : end_date] # type: ignore + else: + vat_value = 0.0 + value_array[last_value.end_date : end_date] = last_value.value * (1 + vat_value) # type: ignore + # Inside value periods. 
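+ # Same day-by-day fill as in _fill_value_array, except that each day's price is grossed up
+ # by the VAT rate in effect on that day (1 + vat) when the period references a known vat_id.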
+ for value in in_values: + latest_start = max(value.start_date, start_date) + earliest_end = min(value.end_date if value.end_date is not None else end_date, end_date) + current_date = latest_start + while current_date <= earliest_end: + if vat_rate_array_by_id is not None and value.vat_id in vat_rate_array_by_id: + vat_value = vat_rate_array_by_id[value.vat_id].value_array[current_date] # type: ignore + else: + vat_value = 0.0 + value_array[current_date] = value.value * (1 + vat_value) # type: ignore + current_date += timedelta(days=1) + + # ---------------------------------- + @classmethod + def get_time_unit_convertion_factor(cls, from_time_unit: TimeUnit, to_time_unit: TimeUnit, dt: date) -> float: + + if from_time_unit == to_time_unit: + return 1.0 + + def days_in_month(year: int, month: int) -> int: + return calendar.monthrange(year, month)[1] + + def days_in_year(year: int) -> int: + return 366 if calendar.isleap(year) else 365 + + if TimeUnit.MONTH in (from_time_unit, to_time_unit): + switcher = { + TimeUnit.DAY: days_in_month(dt.year, dt.month), + TimeUnit.WEEK: days_in_month(dt.year, dt.month) / 7.0, + TimeUnit.MONTH: 1.0, + TimeUnit.YEAR: 1.0 / 12.0, + } + else: + switcher = { + TimeUnit.DAY: 1.0, + TimeUnit.WEEK: 1 / 7.0, + TimeUnit.MONTH: 1 / days_in_month(dt.year, dt.month), + TimeUnit.YEAR: 1 / days_in_year(dt.year), + } + + if from_time_unit not in switcher: + raise ValueError(f"Invalid 'from' time unit: {from_time_unit}") + + if to_time_unit not in switcher: + raise ValueError(f"Invalid 'to' time unit: {to_time_unit}") + + return switcher[to_time_unit] / switcher[from_time_unit] + + # ---------------------------------- + @classmethod + def get_price_unit_convertion_factor(cls, from_price_unit: PriceUnit, to_price_unit: PriceUnit) -> float: + + if from_price_unit == to_price_unit: + return 1.0 + + switcher = { + PriceUnit.EURO: 1.0, + PriceUnit.CENT: 100.0, + } + + if from_price_unit not in switcher: + raise ValueError(f"Invalid 'from' price unit: {from_price_unit}") + + if to_price_unit not in switcher: + raise ValueError(f"Invalid 'to' price unit: {to_price_unit}") + + return switcher[to_price_unit] / switcher[from_price_unit] + + # ---------------------------------- + @classmethod + def get_quantity_unit_convertion_factor( + cls, from_quantity_unit: QuantityUnit, to_quantity_unit: QuantityUnit + ) -> float: + + if from_quantity_unit == to_quantity_unit: + return 1.0 + + switcher = { + QuantityUnit.WH: 1.0, + QuantityUnit.KWH: 0.001, + QuantityUnit.MWH: 0.000001, + } + + if from_quantity_unit not in switcher: + raise ValueError(f"Invalid 'from' quantity unit: {from_quantity_unit}") + + if to_quantity_unit not in switcher: + raise ValueError(f"Invalid 'to' quantity unit: {to_quantity_unit}") + + return switcher[to_quantity_unit] / switcher[from_quantity_unit] + + # ---------------------------------- + @overload + @classmethod + def get_convertion_factor( + cls, + from_unit: Tuple[PriceUnit, QuantityUnit], + to_unit: Tuple[PriceUnit, QuantityUnit], + dt: Optional[date] = None, + ) -> float: ... + + @overload + @classmethod + def get_convertion_factor( + cls, + from_unit: Tuple[PriceUnit, TimeUnit], + to_unit: Tuple[PriceUnit, TimeUnit], + dt: Optional[date] = None, + ) -> float: ... 
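+ # Runtime implementation behind the two overloads above: dispatches on the runtime types
+ # of the (value_unit, base_unit) tuple elements.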
+ + @classmethod + def get_convertion_factor(cls, from_unit, to_unit, dt: Optional[date] = None) -> float: + if type(from_unit) is not type(to_unit): + raise ValueError(f"from_unit {from_unit} and to_unit {to_unit} must be of the same type") + if ( + isinstance(from_unit, tuple) + and isinstance(from_unit[0], PriceUnit) + and isinstance(from_unit[1], QuantityUnit) + ): + return cls.get_price_unit_convertion_factor( + from_unit[0], to_unit[0] + ) / cls.get_quantity_unit_convertion_factor(from_unit[1], to_unit[1]) + if isinstance(from_unit, tuple) and isinstance(from_unit[0], PriceUnit) and isinstance(from_unit[1], TimeUnit): + if dt is None: + raise ValueError( + f"dt must not be None when from_unit {from_unit} and to_unit {to_unit} are of type Tuple[PriceUnit, TimeUnit]" + ) + return cls.get_price_unit_convertion_factor(from_unit[0], to_unit[0]) / cls.get_time_unit_convertion_factor( + from_unit[1], to_unit[1], dt + ) + + raise ValueError( + f"from_unit {from_unit} and to_unit {to_unit} must be of type Tuple[PriceUnit, QuantityUnit] or Tuple[PriceUnit, TimeUnit]" + ) + + # ---------------------------------- + @classmethod + def convert( + cls, + price_values: list[PriceValue[ValueUnit, BaseUnit]], + to_unit: Tuple[ValueUnit, BaseUnit], + ) -> list[PriceValue[ValueUnit, BaseUnit]]: + + if price_values is None or len(price_values) == 0: + raise ValueError("price_values is None or empty") + + if to_unit is None: + raise ValueError("to_unit is None") + + res = list[PriceValue[ValueUnit, BaseUnit]]() + for price_value in price_values: + if price_value.value_unit is None: + raise ValueError("price_value.value_unit is None") + if price_value.base_unit is None: + raise ValueError("price_value.base_unit is None") + + res.append( + PriceValue( + start_date=price_value.start_date, + end_date=price_value.end_date, + value=price_value.value + * cls.get_convertion_factor( + (price_value.value_unit, price_value.base_unit), to_unit, price_value.start_date # type: ignore + ), + value_unit=to_unit[0], + base_unit=to_unit[1], + vat_id=price_value.vat_id, + ) + ) + + return res diff --git a/poetry.lock b/poetry.lock index ea9ae6c..e3d6827 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,82 @@ # This file is automatically @generated by Poetry 2.0.0 and should not be changed by hand. +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "astroid" +version = "3.3.8" +description = "An abstract syntax tree for Python with inference support." 
+optional = false +python-versions = ">=3.9.0" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c"}, + {file = "astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = 
"black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "certifi" version = "2024.12.14" @@ -116,6 +193,22 @@ files = [ {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + [[package]] name = "colorama" version = "0.4.6" @@ -123,12 +216,68 @@ description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" groups = ["dev"] -markers = "(python_version <= \"3.11\" or python_version >= \"3.12\") and sys_platform == \"win32\"" +markers = "(sys_platform == \"win32\" or platform_system == \"Windows\") and (python_version <= \"3.11\" or python_version >= \"3.12\")" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "dill" +version = "0.3.9" +description = "serialize all of Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, + {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, +] + +[package.extras] +graph = ["objgraph (>=1.7.2)"] +profile = ["gprof2dot (>=2022.7.29)"] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + [[package]] name = "et-xmlfile" version = "2.0.0" @@ -158,6 +307,43 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "flake8" +version = "7.1.1" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, + {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "flake8-pyproject" +version = "1.2.3" +description = "Flake8 plug-in loading the configuration from pyproject.toml" +optional = false +python-versions = ">= 3.6" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "flake8_pyproject-1.2.3-py3-none-any.whl", hash = "sha256:6249fe53545205af5e76837644dc80b4c10037e73a0e5db87ff562d75fb5bd4a"}, +] + +[package.dependencies] +Flake8 = ">=5" +TOMLi = {version = "*", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["pyTest", "pyTest-cov"] + [[package]] name = "idna" version = "3.10" @@ -187,6 +373,110 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "isort" +version = "6.0.0" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.9.0" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "isort-6.0.0-py3-none-any.whl", hash = "sha256:567954102bb47bb12e0fae62606570faacddd441e45683968c8d1734fb1af892"}, + {file = "isort-6.0.0.tar.gz", hash = "sha256:75d9d8a1438a9432a7d7b54f2d3b45cad9a4a0fdba43617d9873379704a8bdf1"}, +] + +[package.extras] +colors = ["colorama"] +plugins = ["setuptools"] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy" +version = "1.14.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = 
"sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + [[package]] name = "numpy" version = "2.0.2" @@ -250,7 +540,7 @@ description = "Fundamental package for array computing in Python" optional = false python-versions = ">=3.10" groups = ["main"] -markers = "python_version == \"3.11\" or python_version >= \"3.12\"" +markers = "python_version >= \"3.12\" or python_version == \"3.11\"" files = [ {file = "numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440"}, {file = "numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab"}, @@ -394,8 +684,8 @@ files = [ [package.dependencies] numpy = [ {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -426,6 +716,37 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.9.2)"] +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] + [[package]] name = "pluggy" version = "1.5.0" @@ -443,6 +764,194 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "pycodestyle" +version = "2.12.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, + {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = 
"sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, 
+ {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pydantic-extra-types" +version = "2.10.2" +description = "Extra Pydantic types." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pydantic_extra_types-2.10.2-py3-none-any.whl", hash = "sha256:9eccd55a2b7935cea25f0a67f6ff763d55d80c41d86b887d88915412ccf5b7fa"}, + {file = "pydantic_extra_types-2.10.2.tar.gz", hash = "sha256:934d59ab7a02ff788759c3a97bc896f5cfdc91e62e4f88ea4669067a73f14b98"}, +] + +[package.dependencies] +pydantic = ">=2.5.2" +typing-extensions = "*" + +[package.extras] +all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", "python-ulid (>=1,<2)", "python-ulid (>=1,<4)", "pytz (>=2024.1)", "semver (>=3.0.2)", "semver (>=3.0.2,<3.1.0)", "tzdata (>=2024.1)"] +pendulum = ["pendulum (>=3.0.0,<4.0.0)"] +phonenumbers = ["phonenumbers (>=8,<9)"] +pycountry = ["pycountry (>=23)"] +python-ulid = ["python-ulid (>=1,<2)", "python-ulid (>=1,<4)"] +semver = ["semver (>=3.0.2)"] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + [[package]] name = "pygazpar" version = "1.2.7" @@ -466,6 +975,38 @@ openpyxl = ">=2.6.3" pandas = "*" requests = ">=2.26.0" +[[package]] +name = "pylint" +version = "3.3.4" +description = "python code static checker" +optional = false +python-versions = ">=3.9.0" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "pylint-3.3.4-py3-none-any.whl", hash = "sha256:289e6a1eb27b453b08436478391a48cd53bb0efb824873f949e709350f3de018"}, + {file = "pylint-3.3.4.tar.gz", hash = "sha256:74ae7a38b177e69a9b525d0794bd8183820bfa7eb68cc1bee6e8ed22a42be4ce"}, +] + +[package.dependencies] +astroid = ">=3.3.8,<=3.4.0-dev0" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +isort = ">=4.2.5,<5.13.0 || >5.13.0,<7" +mccabe = ">=0.6,<0.8" +platformdirs = ">=2.2.0" +tomli = {version = ">=1.1.0", 
markers = "python_version < \"3.11\""} +tomlkit = ">=0.10.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +spelling = ["pyenchant (>=3.2,<4.0)"] +testutils = ["gitpython (>3)"] + [[package]] name = "pytest" version = "8.3.4" @@ -626,6 +1167,35 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.9.4" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706"}, + {file = "ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf"}, + {file = "ruff-0.9.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0"}, + {file = "ruff-0.9.4-py3-none-win32.whl", hash = "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402"}, + {file = "ruff-0.9.4-py3-none-win_amd64.whl", hash = "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e"}, + {file = "ruff-0.9.4-py3-none-win_arm64.whl", hash = "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41"}, + {file = "ruff-0.9.4.tar.gz", hash = "sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7"}, +] + [[package]] name = "six" version = "1.17.0" @@ -682,6 +1252,32 @@ files = [ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] +[[package]] +name = "tomlkit" +version = "0.13.2" 
+description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +markers = "python_version <= \"3.11\" or python_version >= \"3.12\"" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + [[package]] name = "tzdata" version = "2024.2" @@ -797,4 +1393,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = ">=3.9" -content-hash = "4acb69e169aa319f7e89ca50d777d8f7c0084fbb165b6371abc7e2ac84a7e62f" +content-hash = "ce30d76443d3f672756333440d8813f1a95c583b968cc52a227e54c63aaeb5f6" diff --git a/pyproject.template.toml b/pyproject.template.toml index daa9df9..a263a4f 100644 --- a/pyproject.template.toml +++ b/pyproject.template.toml @@ -12,12 +12,14 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.12", ] dependencies = [ "pygazpar>=1.2.7", "websockets>=14.1", - "pyyaml>=6.0.2" + "pyyaml>=6.0.2", + "pydantic[email] (>=2.10.6,<3.0.0)", + "pydantic-extra-types (>=2.10.2,<3.0.0)", ] [tool.poetry] @@ -27,7 +29,39 @@ include = ["CHANGELOG.md"] [tool.poetry.group.dev.dependencies] pytest = "^8.3.4" pytest-asyncio = "^0.25.0" +flake8-pyproject = "^1.2.3" +pylint = "^3.3.4" +black = "^25.1.0" +flake8 = "^7.1.1" +isort = "^6.0.0" +mypy = "^1.14.1" +ruff = "^0.9.4" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.pylint.'MESSAGES CONTROL'] +ignore = ".venv" +max-line-length = 120 +disable = "C,W1203,R0902,R0913,R0914,R0917,R0801" + +[tool.black] +exclude = ".venv" +line-length = 120 + +[tool.flake8] +max-line-length = 120 +extend-ignore = [ "E203", "W503", "E704", "E501" ] +exclude = [".venv"] + +[tool.isort] +profile = "black" +skip = ".venv" + +[tool.mypy] +exclude = [ ".venv" ] + +[tool.ruff] +exclude = [ ".venv" ] +line-length = 120 \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 5010943..ac7a0cc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "gazpar2haws" -version = "0.2.1" +version = "0.3.0.dev14" description = "Gazpar2HAWS is a gateway that reads data history from the GrDF (French gas provider) meter and send it to Home Assistant using WebSocket interface" license = { file = "LICENSE" } readme = "README.md" @@ -12,12 +12,14 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", - "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.12", ] dependencies = [ "pygazpar>=1.2.7", "websockets>=14.1", - "pyyaml>=6.0.2" + "pyyaml>=6.0.2", + "pydantic[email] (>=2.10.6,<3.0.0)", + "pydantic-extra-types 
(>=2.10.2,<3.0.0)", ] [tool.poetry] @@ -27,7 +29,39 @@ include = ["CHANGELOG.md"] [tool.poetry.group.dev.dependencies] pytest = "^8.3.4" pytest-asyncio = "^0.25.0" +flake8-pyproject = "^1.2.3" +pylint = "^3.3.4" +black = "^25.1.0" +flake8 = "^7.1.1" +isort = "^6.0.0" +mypy = "^1.14.1" +ruff = "^0.9.4" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.pylint.'MESSAGES CONTROL'] +ignore = ".venv" +max-line-length = 120 +disable = "C,W1203,R0902,R0913,R0914,R0917,R0801" + +[tool.black] +exclude = ".venv" +line-length = 120 + +[tool.flake8] +max-line-length = 120 +extend-ignore = [ "E203", "W503", "E704", "E501" ] +exclude = [".venv"] + +[tool.isort] +profile = "black" +skip = ".venv" + +[tool.mypy] +exclude = [ ".venv" ] + +[tool.ruff] +exclude = [ ".venv" ] +line-length = 120 \ No newline at end of file diff --git a/tests/XLPricer.xlsx b/tests/XLPricer.xlsx new file mode 100644 index 0000000..a615aee Binary files /dev/null and b/tests/XLPricer.xlsx differ diff --git a/tests/config/configuration.yaml b/tests/config/configuration.yaml index f6d86a6..c856730 100644 --- a/tests/config/configuration.yaml +++ b/tests/config/configuration.yaml @@ -18,3 +18,62 @@ homeassistant: host: "!secret homeassistant.host" port: "!secret homeassistant.port" token: "!secret homeassistant.token" + +pricing: + vat: + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2023-07-01" + value: 0.05392 + - start_date: "2023-08-01" + value: 0.05568 + - start_date: "2023-09-01" + value: 0.05412 + - start_date: "2023-10-01" + value: 0.06333 + - start_date: "2023-11-01" + value: 0.06716 + - start_date: "2023-12-01" + value: 0.07235 + - start_date: "2024-01-01" + value: 0.06888 + - start_date: "2024-02-01" + value: 0.05972 + - start_date: "2024-03-01" + value: 0.05506 + - start_date: "2024-04-01" + value: 0.04842 + - start_date: "2025-01-01" + value: 0.07807 + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: reduced + - start_date: "2023-07-01" + value: 20.36 + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal + - start_date: "2024-01-01" + value: 0.01637 diff --git a/tests/config/example_1.yaml b/tests/config/example_1.yaml new file mode 100644 index 0000000..147f6d7 --- /dev/null +++ b/tests/config/example_1.yaml @@ -0,0 +1,25 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. 
+ as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. diff --git a/tests/config/example_2.yaml b/tests/config/example_2.yaml new file mode 100644 index 0000000..9277d60 --- /dev/null +++ b/tests/config/example_2.yaml @@ -0,0 +1,27 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 7790.0 # Unit is now ¢/MWh. + value_unit: "¢" + base_unit: "MWh" diff --git a/tests/config/example_3.yaml b/tests/config/example_3.yaml new file mode 100644 index 0000000..19cb4c7 --- /dev/null +++ b/tests/config/example_3.yaml @@ -0,0 +1,27 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + - start_date: "2024-01-01" + value: 0.06888 # Default unit is €/kWh. 
diff --git a/tests/config/example_4.yaml b/tests/config/example_4.yaml new file mode 100644 index 0000000..c88b38e --- /dev/null +++ b/tests/config/example_4.yaml @@ -0,0 +1,30 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. diff --git a/tests/config/example_5.yaml b/tests/config/example_5.yaml new file mode 100644 index 0000000..bf8322a --- /dev/null +++ b/tests/config/example_5.yaml @@ -0,0 +1,39 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. + subscription_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". 
+ value: 19.83 + value_unit: "€" + base_unit: "month" + vat_id: "reduced" diff --git a/tests/config/example_6.yaml b/tests/config/example_6.yaml new file mode 100644 index 0000000..f8dd644 --- /dev/null +++ b/tests/config/example_6.yaml @@ -0,0 +1,39 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. + vat_id: "normal" # Reference to the vat rate that is applied for this period. + transport_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 34.38 + value_unit: "€" + base_unit: "year" + vat_id: reduced diff --git a/tests/config/example_7.yaml b/tests/config/example_7.yaml new file mode 100644 index 0000000..79b6a30 --- /dev/null +++ b/tests/config/example_7.yaml @@ -0,0 +1,39 @@ +logging: + file: log/gazpar2haws.log + console: true + level: debug + format: '%(asctime)s %(levelname)s [%(name)s] %(message)s' + +grdf: + scan_interval: 0 # Number of minutes between each data retrieval (0 means no scan: a single data retrieval at startup, then stops). + devices: + - name: gazpar2haws # Name of the device in home assistant. It will be used as the entity_id: sensor.${name}. + data_source: "test" # Data source to use: "json" | "excel" | "test". Default is "json". "test" is a static data source for testing purposes. + as_of_date: "2021-04-20" # Date of the data to retrieve. Format is "YYYY-MM-DD". Used only if data_source is "test". + timezone: Europe/Paris + last_days: 365 # Number of days of data to retrieve. + reset: false # If true, the data will be reset before the first data retrieval. If false, the data will be kept and new data will be added. + +homeassistant: + host: "!secret homeassistant.host" + port: "!secret homeassistant.port" + token: "!secret homeassistant.token" + +pricing: + vat: + - id: normal + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.20 # It is the tax rate in [0, 1.0] <==> [0% - 100%]. + - id: reduced + start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.0550 + consumption_prices: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.07790 # Default unit is €/kWh. 
+ vat_id: "normal" # Reference to the vat rate that is applied for this period. + energy_taxes: + - start_date: "2023-06-01" # Date of the price. Format is "YYYY-MM-DD". + value: 0.00837 + value_unit: "€" + base_unit: "kWh" + vat_id: normal diff --git a/tests/test_bridge.py b/tests/test_bridge.py index 2615abf..e85f62f 100644 --- a/tests/test_bridge.py +++ b/tests/test_bridge.py @@ -2,8 +2,8 @@ import pytest -from gazpar2haws import config_utils from gazpar2haws.bridge import Bridge +from gazpar2haws.configuration import Configuration # ---------------------------------- @@ -12,11 +12,7 @@ async def test_run(): # Load configuration - config = config_utils.ConfigLoader( - "tests/config/configuration.yaml", "tests/config/secrets.yaml" - ) - config.load_secrets() - config.load_config() + config = Configuration.load("tests/config/configuration.yaml", "tests/config/secrets.yaml") # pylint: disable=W0201 bridge = Bridge(config) await bridge.run() diff --git a/tests/test_configuration.py b/tests/test_configuration.py new file mode 100644 index 0000000..c803a9a --- /dev/null +++ b/tests/test_configuration.py @@ -0,0 +1,10 @@ +"""Test the configuration module.""" + +from gazpar2haws.configuration import Configuration + + +def test_configuration(): + + config = Configuration.load("tests/config/configuration.yaml", "tests/config/secrets.yaml") + + assert config.logging.level == "debug" diff --git a/tests/test_date_array.py b/tests/test_date_array.py new file mode 100644 index 0000000..2a921cd --- /dev/null +++ b/tests/test_date_array.py @@ -0,0 +1,50 @@ +"""Test the date_array module.""" + +from datetime import date + +from gazpar2haws.date_array import DateArray + + +def test_date_array(): + + date_array = DateArray(start_date=date(2021, 1, 1), end_date=date(2021, 1, 31)) + + assert len(date_array) == 31 + + assert date_array.is_aligned_with(date_array) + + date_array2 = DateArray(start_date=date(2021, 1, 1), end_date=date(2021, 1, 31)) + + assert date_array.is_aligned_with(date_array2) + + date_array3 = DateArray(start_date=date(2021, 1, 1), end_date=date(2021, 1, 30)) + + assert not date_array.is_aligned_with(date_array3) + + date_array4 = DateArray(start_date=date(2021, 1, 1), end_date=date(2021, 1, 31), initial_value=1) + + date_array5 = date_array + date_array4 + + assert len(date_array5) == 31 + + date_array6 = date_array - date_array4 + + assert len(date_array6) == 31 + + date_array7 = date_array * date_array4 + + assert len(date_array7) == 31 + + date_array8 = date_array / date_array4 + + assert len(date_array8) == 31 + + date_array9 = date_array + 1 + + for i in range(31): + assert date_array9[i] == 1 # pylint: disable=unsubscriptable-object + + date_array10 = date_array9 * 5 + + for i in range(31): + assert date_array10[i] == 5 diff --git a/tests/test_gazpar.py b/tests/test_gazpar.py index 1f357c0..2678a44 100644 --- a/tests/test_gazpar.py +++ b/tests/test_gazpar.py @@ -1,10 +1,20 @@ """Test gazpar module.""" +from datetime import date + +import pygazpar # type: ignore import pytest -from gazpar2haws import config_utils +from gazpar2haws.configuration import Configuration from gazpar2haws.gazpar import Gazpar from gazpar2haws.haws import HomeAssistantWS +from gazpar2haws.model import ( + ConsumptionQuantityArray, + PriceUnit, + QuantityUnit, + TimeUnit, +) +from gazpar2haws.pricer import Pricer # ---------------------------------- @@ -15,39 +25,127 @@ def setup_method(self): # pylint: disable=R0801 """setup any state tied to the execution of the given method in a class. 
setup_method is invoked for every test method of a class. """ + # Load configuration - self._config = config_utils.ConfigLoader( # pylint: disable=W0201 + self._config = Configuration.load( # pylint: disable=W0201 "tests/config/configuration.yaml", "tests/config/secrets.yaml" ) - self._config.load_secrets() - self._config.load_config() - - ha_host = self._config.get("homeassistant.host") - ha_port = self._config.get("homeassistant.port") - ha_endpoint = ( - self._config.get("homeassistant.endpoint") - if self._config.get("homeassistant.endpoint") - else "/api/websocket" - ) - ha_token = self._config.get("homeassistant.token") - self._haws = HomeAssistantWS( # pylint: disable=W0201 - ha_host, ha_port, ha_endpoint, ha_token - ) + ha_host = self._config.homeassistant.host + ha_port = self._config.homeassistant.port + ha_endpoint = self._config.homeassistant.endpoint + ha_token = self._config.homeassistant.token.get_secret_value() - self._grdf_device_config = self._config.get( # pylint: disable=W0201 - "grdf.devices" - )[0] + self._haws = HomeAssistantWS(ha_host, ha_port, ha_endpoint, ha_token) # pylint: disable=W0201 + self._grdf_device_config = self._config.grdf.devices[0] # pylint: disable=W0201 + self._pricing_config = self._config.pricing # pylint: disable=W0201 # ---------------------------------- # @pytest.mark.skip(reason="Requires Home Assistant server") @pytest.mark.asyncio async def test_publish(self): - gazpar = Gazpar(self._grdf_device_config, self._haws) + gazpar = Gazpar(self._grdf_device_config, self._pricing_config, self._haws) await self._haws.connect() await gazpar.publish() await self._haws.disconnect() + + # ---------------------------------- + def test_fetch_daily_gazpar_history(self): + + gazpar = Gazpar(self._grdf_device_config, self._pricing_config, self._haws) + + start_date = date(2019, 6, 1) + end_date = date(2019, 6, 30) + + daily_history = gazpar.fetch_daily_gazpar_history(start_date, end_date) + + assert daily_history is not None and len(daily_history) > 0 + + # ---------------------------------- + @pytest.mark.asyncio + async def test_find_last_date_and_value(self): + + gazpar = Gazpar(self._grdf_device_config, self._pricing_config, self._haws) + + await self._haws.connect() + + last_date, last_value = await gazpar.find_last_date_and_value("sensor.gazpar2haws_test") + + assert last_date is not None + assert last_value is not None + + await self._haws.disconnect() + + # ---------------------------------- + @pytest.mark.asyncio + async def test_push_energy_date_array(self): + + gazpar = Gazpar(self._grdf_device_config, self._pricing_config, self._haws) + + await self._haws.connect() + + start_date = date(2019, 6, 1) + end_date = date(2019, 6, 30) + + # Fetch the data from GrDF and publish it to Home Assistant + daily_history = gazpar.fetch_daily_gazpar_history(start_date, end_date) + + # Extract the energy from the daily history + energy_array = gazpar.extract_property_from_daily_gazpar_history( + daily_history, pygazpar.PropertyName.ENERGY.value, start_date, end_date + ) + + await gazpar.publish_date_array("sensor.gazpar2haws_test", "kWh", energy_array, 0) + + await self._haws.disconnect() + + # ---------------------------------- + @pytest.mark.asyncio + async def test_push_cost_date_array(self): + + gazpar = Gazpar(self._grdf_device_config, self._pricing_config, self._haws) + + await self._haws.connect() + + start_date = date(2019, 6, 1) + end_date = date(2019, 6, 30) + + # Fetch the data from GrDF and publish it to Home Assistant + daily_history = 
gazpar.fetch_daily_gazpar_history(start_date, end_date) + + # Extract the energy from the daily history + energy_array = gazpar.extract_property_from_daily_gazpar_history( + daily_history, pygazpar.PropertyName.ENERGY.value, start_date, end_date + ) + + # Compute the cost from the energy + quantities = ConsumptionQuantityArray( + start_date=start_date, + end_date=end_date, + value_unit=QuantityUnit.KWH, + base_unit=TimeUnit.DAY, + value_array=energy_array, + ) + + # Compute the cost + if energy_array is not None: + pricer = Pricer(self._pricing_config) + + cost_array = pricer.compute(quantities, PriceUnit.EURO) + else: + cost_array = None + + await gazpar.publish_date_array("sensor.gazpar2haws_energy_test", "kWh", energy_array, 0) + + await gazpar.publish_date_array( + "sensor.gazpar2haws_cost_test", + cost_array.value_unit, + cost_array.value_array, + 0, + ) + + await self._haws.disconnect() diff --git a/tests/test_haws.py b/tests/test_haws.py index 9d71b9a..41f1472 100644 --- a/tests/test_haws.py +++ b/tests/test_haws.py @@ -34,9 +34,7 @@ def setup_method(self): ) ha_token = self._config.get("homeassistant.token") - self._haws = HomeAssistantWS( # pylint: disable=W0201 - ha_host, ha_port, ha_endpoint, ha_token - ) + self._haws = HomeAssistantWS(ha_host, ha_port, ha_endpoint, ha_token) # pylint: disable=W0201 # ---------------------------------- # @pytest.mark.skip(reason="Requires Home Assistant server") @@ -67,9 +65,7 @@ async def test_exists_statistic_id(self): await self._haws.connect() - exists_statistic_id = await self._haws.exists_statistic_id( - "sensor.gazpar2haws_volume" - ) + exists_statistic_id = await self._haws.exists_statistic_id("sensor.gazpar2haws_volume") assert exists_statistic_id is not None @@ -82,9 +78,7 @@ async def test_get_last_statistic(self): await self._haws.connect() - statistics = await self._haws.get_last_statistic( - "sensor.gazpar2haws_volume", datetime.now(), 30 - ) + statistics = await self._haws.get_last_statistic("sensor.gazpar2haws_volume", datetime.now(), 30) assert statistics is not None @@ -103,9 +97,7 @@ async def test_import_statistics(self): {"start": "2020-12-16T00:00:00+00:00", "state": 300.0, "sum": 300.0}, ] - await self._haws.import_statistics( - "sensor.gazpar2haws_volume", "recorder", "test", "m³", statistics - ) + await self._haws.import_statistics("sensor.gazpar2haws_volume", "recorder", "test", "m³", statistics) await self._haws.disconnect() @@ -116,8 +108,6 @@ async def test_clear_statistics(self): await self._haws.connect() - await self._haws.clear_statistics( - ["sensor.gazpar2haws_energy", "sensor.gazpar2haws_volume"] - ) + await self._haws.clear_statistics(["sensor.gazpar2haws_energy", "sensor.gazpar2haws_volume"]) await self._haws.disconnect() diff --git a/tests/test_main.py b/tests/test_main.py index 92ba292..068f0b3 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -8,7 +8,6 @@ # ---------------------------------- -# @pytest.mark.skip(reason="Requires Home Assistant server") @pytest.mark.asyncio async def test_main(): diff --git a/tests/test_pricer.py b/tests/test_pricer.py new file mode 100644 index 0000000..36f0bb2 --- /dev/null +++ b/tests/test_pricer.py @@ -0,0 +1,542 @@ +"""Test pricer module.""" + +import math +from datetime import date + +from gazpar2haws.configuration import Configuration +from gazpar2haws.model import ( + ConsumptionQuantityArray, + DateArray, + PriceUnit, + QuantityUnit, + TimeUnit, + VatRateArray, +) +from gazpar2haws.pricer import Pricer + + +# ---------------------------------- +class 
TestPricer:  # pylint: disable=R0904
+
+    # ----------------------------------
+    def setup_method(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/configuration.yaml", "tests/config/secrets.yaml")
+
+        self._pricer = Pricer(config.pricing)  # pylint: disable=W0201
+
+    # ----------------------------------
+    def test_get_consumption_price_array_inside(self):
+
+        start_date = date(2023, 8, 20)
+        end_date = date(2023, 8, 25)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 6
+        assert consumption_price_array.value_array[start_date] == 0.05568
+        assert consumption_price_array.value_array[end_date] == 0.05568
+
+    # ----------------------------------
+    def test_get_consumption_price_array_accross_middle(self):
+
+        start_date = date(2023, 8, 20)
+        end_date = date(2023, 9, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 17
+        assert consumption_price_array.value_array[start_date] == 0.05568
+        assert consumption_price_array.value_array[end_date] == 0.05412
+
+    # ----------------------------------
+    def test_get_consumption_price_array_accross_start(self):
+
+        start_date = date(2023, 5, 25)
+        end_date = date(2023, 6, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 12
+        assert consumption_price_array.value_array[start_date] == 0.07790
+        assert consumption_price_array.value_array[end_date] == 0.07790
+
+    # ----------------------------------
+    def test_get_consumption_price_array_accross_end(self):
+
+        start_date = date(2024, 12, 25)
+        end_date = date(2025, 1, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 12
+        assert consumption_price_array.value_array[start_date] == 0.04842
+        assert consumption_price_array.value_array[end_date] == 0.07807
+
+    # ----------------------------------
+    def test_get_consumption_price_array_outside(self):
+
+        start_date = date(2023, 7, 20)
+        end_date = date(2023, 9, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 48
+        assert consumption_price_array.value_array[start_date] == 0.05392
+        assert consumption_price_array.value_array[end_date] == 0.05412
+
+    # ----------------------------------
+    def test_get_consumption_price_array_before(self):
+
+        start_date = date(2023, 5, 1)
+        end_date = date(2023, 5, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 5
+        assert consumption_price_array.value_array[start_date] == 0.07790
+        assert consumption_price_array.value_array[end_date] == 0.07790
+
+    # ----------------------------------
+    def test_get_consumption_price_array_after(self):
+
+        start_date = date(2025, 5, 1)
+        end_date = date(2025, 5, 5)
+
+        vat_rate_array_by_id = {
+            "reduced": VatRateArray(id="reduced", start_date=start_date, end_date=end_date),
+            "normal": VatRateArray(id="normal", start_date=start_date, end_date=end_date),
+        }
+
+        consumption_price_array = Pricer.get_consumption_price_array(
+            start_date=start_date,
+            end_date=end_date,
+            consumption_prices=self._pricer.pricing_data().consumption_prices,
+            vat_rate_array_by_id=vat_rate_array_by_id,
+        )
+
+        assert consumption_price_array.start_date == start_date
+        assert consumption_price_array.end_date == end_date
+        assert consumption_price_array.value_unit == "€"
+        assert consumption_price_array.base_unit == "kWh"
+        assert consumption_price_array.vat_id == "normal"
+        assert len(consumption_price_array.value_array) == 5
+        assert consumption_price_array.value_array[start_date] == 0.07807
+        assert consumption_price_array.value_array[end_date] == 0.07807
+
+    # ----------------------------------
+    def test_get_vat_rate_array_by_id(self):
+
+        start_date = date(2023, 8, 20)
+        end_date = date(2023, 8, 25)
+
+        vat_rate_array_by_id = Pricer.get_vat_rate_array_by_id(
+            start_date=start_date,
+            end_date=end_date,
+            vat_rates=self._pricer.pricing_data().vat,
+        )
+
+        assert len(vat_rate_array_by_id) == 2
+        assert vat_rate_array_by_id.get("reduced") is not None
+        assert vat_rate_array_by_id.get("normal") is not None
+        assert vat_rate_array_by_id.get("reduced").start_date == start_date
+        assert vat_rate_array_by_id.get("reduced").end_date == end_date
+        assert len(vat_rate_array_by_id.get("reduced").value_array) == 6
+        assert vat_rate_array_by_id.get("reduced").value_array[start_date] == 0.055
+        assert vat_rate_array_by_id.get("reduced").value_array[end_date] == 0.055
+        assert vat_rate_array_by_id.get("normal").start_date == start_date
+        assert vat_rate_array_by_id.get("normal").end_date == end_date
+        assert len(vat_rate_array_by_id.get("normal").value_array) == 6
+        assert vat_rate_array_by_id.get("normal").value_array[start_date] == 0.2
+        assert vat_rate_array_by_id.get("normal").value_array[end_date] == 0.2
+
+    # ----------------------------------
+    def test_get_time_unit_convertion_factor(self):
+
+        dt = date(2023, 8, 20)
+
+        assert math.isclose(Pricer.get_time_unit_convertion_factor(TimeUnit.YEAR, TimeUnit.MONTH, dt), 12, rel_tol=1e-6)
+        assert math.isclose(
+            Pricer.get_time_unit_convertion_factor(TimeUnit.MONTH, TimeUnit.YEAR, dt), 1 / 12, rel_tol=1e-6
+        )
+        assert math.isclose(Pricer.get_time_unit_convertion_factor(TimeUnit.YEAR, TimeUnit.DAY, dt), 365, rel_tol=1e-6)
+        assert math.isclose(
+            Pricer.get_time_unit_convertion_factor(TimeUnit.DAY, TimeUnit.YEAR, dt), 1 / 365, rel_tol=1e-6
+        )
+        assert math.isclose(Pricer.get_time_unit_convertion_factor(TimeUnit.MONTH, TimeUnit.DAY, dt), 31, rel_tol=1e-6)
+        assert math.isclose(
+            Pricer.get_time_unit_convertion_factor(TimeUnit.DAY, TimeUnit.MONTH, dt), 1 / 31, rel_tol=1e-6
+        )
+
+    # ----------------------------------
+    def test_get_price_unit_convertion_factor(self):
+
+        assert math.isclose(
+            Pricer.get_price_unit_convertion_factor(PriceUnit.EURO, PriceUnit.CENT), 100.0, rel_tol=1e-6
+        )
+        assert math.isclose(Pricer.get_price_unit_convertion_factor(PriceUnit.CENT, PriceUnit.EURO), 0.01, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_get_quantity_unit_convertion_factor(self):
+
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.KWH, QuantityUnit.MWH), 0.001, rel_tol=1e-6
+        )
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.MWH, QuantityUnit.KWH), 1000.0, rel_tol=1e-6
+        )
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.WH, QuantityUnit.KWH), 0.001, rel_tol=1e-6
+        )
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.KWH, QuantityUnit.WH), 1000.0, rel_tol=1e-6
+        )
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.WH, QuantityUnit.MWH), 0.000001, rel_tol=1e-6
+        )
+        assert math.isclose(
+            Pricer.get_quantity_unit_convertion_factor(QuantityUnit.MWH, QuantityUnit.WH), 1000000.0, rel_tol=1e-6
+        )
+
+    # ----------------------------------
+    def test_get_convertion_factor(self):
+
+        dt = date(2023, 8, 20)
+
+        euro_per_kwh = (PriceUnit.EURO, QuantityUnit.KWH)
+        cent_per_kwh = (PriceUnit.CENT, QuantityUnit.KWH)
+        euro_per_mwh = (PriceUnit.EURO, QuantityUnit.MWH)
+        cent_per_mwh = (PriceUnit.CENT, QuantityUnit.MWH)
+
+        euro_per_year = (PriceUnit.EURO, TimeUnit.YEAR)
+        cent_per_year = (PriceUnit.CENT, TimeUnit.YEAR)
+        euro_per_month = (PriceUnit.EURO, TimeUnit.MONTH)
+        cent_per_month = (PriceUnit.CENT, TimeUnit.MONTH)
+        euro_per_day = (PriceUnit.EURO, TimeUnit.DAY)
+        cent_per_day = (PriceUnit.CENT, TimeUnit.DAY)
+
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_kwh, euro_per_kwh), 1.0, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_kwh, cent_per_kwh), 100.0, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_kwh, euro_per_kwh), 0.01, rel_tol=1e-6)
+
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_kwh, euro_per_mwh), 1000.0, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_mwh, euro_per_kwh), 0.001, rel_tol=1e-6)
+
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_mwh, euro_per_kwh), 0.00001, rel_tol=1e-6)
+
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_year, euro_per_month, dt), 1 / 12, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_month, euro_per_year, dt), 12, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_year, euro_per_day, dt), 1 / 365, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_day, euro_per_year, dt), 365, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_month, euro_per_day, dt), 1 / 31, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(euro_per_day, euro_per_month, dt), 31, rel_tol=1e-6)
+
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_year, cent_per_month, dt), 1 / 12, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_month, cent_per_year, dt), 12, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_year, cent_per_day, dt), 1 / 365, rel_tol=1e-6)
+        assert math.isclose(Pricer.get_convertion_factor(cent_per_day, cent_per_year, dt), 365, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_convert(self):
+
+        consumption_prices = self._pricer.pricing_data().consumption_prices
+
+        converted_prices = Pricer.convert(consumption_prices, (PriceUnit.CENT, QuantityUnit.WH))
+
+        for i in range(len(consumption_prices) - 1):
+            consumption_price = consumption_prices[i]
+            converted_price = converted_prices[i]
+
+            assert converted_price.value_unit == PriceUnit.CENT
+            assert converted_price.base_unit == QuantityUnit.WH
+            assert converted_price.value == 0.1 * consumption_price.value
+
+    # ----------------------------------
+    def _create_quantities(
+        self, start_date: date, end_date: date, quantity: float, unit: QuantityUnit
+    ) -> ConsumptionQuantityArray:
+
+        quantities = ConsumptionQuantityArray(
+            start_date=start_date,
+            end_date=end_date,
+            value_array=DateArray(start_date=start_date, end_date=end_date, initial_value=quantity),
+            value_unit=unit,
+            base_unit=TimeUnit.DAY,
+        )
+
+        return quantities
+
+    # ----------------------------------
+    def test_compute(self):
+
+        start_date = date(2023, 8, 20)
+        end_date = date(2023, 8, 25)
+
+        # Constant 1 kWh per day over the six-day window.
+        quantities = self._create_quantities(start_date, end_date, 1.0, QuantityUnit.KWH)
+
+        cost_array = self._pricer.compute(quantities, PriceUnit.EURO)
+
+        assert cost_array.start_date == start_date
+        assert cost_array.end_date == end_date
+        assert cost_array.value_unit == "€"
+        assert len(cost_array.value_array) == 6
+        assert math.isclose(cost_array.value_array[start_date], 0.86912910, rel_tol=1e-6)
+        assert math.isclose(cost_array.value_array[end_date], 0.86912910, rel_tol=1e-6)
+
+    # ----------------------------------
+    def _compute_cost(self, pricer: Pricer, single_date: date, quantity: float, unit: QuantityUnit) -> float:
+
+        # Prepare the quantities
+        quantities = self._create_quantities(single_date, single_date, quantity, unit)
+
+        # Compute the cost
+        cost_array = pricer.compute(quantities, PriceUnit.EURO)
+
+        if cost_array.value_array is not None:
+            return cost_array.value_array[single_date]
+
+        return 0.0
+
+    # ----------------------------------
+    def test_example_1(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_1.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 6, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # Before the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 4, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # After the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 8, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_example_2(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_2.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 6, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # Before the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 4, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # After the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 8, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_example_3(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_3.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the first price date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 6, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # Before the first price date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 4, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # After the first price date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 8, 1), 1.0, QuantityUnit.KWH), 0.0779, rel_tol=1e-6)
+
+        # At the second price date.
+        assert math.isclose(self._compute_cost(pricer, date(2024, 1, 1), 1.0, QuantityUnit.KWH), 0.06888, rel_tol=1e-6)
+
+        # Well after the second price date.
+        assert math.isclose(self._compute_cost(pricer, date(2024, 11, 1), 1.0, QuantityUnit.KWH), 0.06888, rel_tol=1e-6)
+
+        # Shortly after the second price date.
+        assert math.isclose(self._compute_cost(pricer, date(2024, 3, 1), 1.0, QuantityUnit.KWH), 0.06888, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_example_4(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_4.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 6, 1), 10.0, QuantityUnit.KWH), 0.9348, rel_tol=1e-6)
+
+        # Before the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 4, 1), 10.0, QuantityUnit.KWH), 0.9348, rel_tol=1e-6)
+
+        # After the date.
+        assert math.isclose(self._compute_cost(pricer, date(2023, 8, 1), 10.0, QuantityUnit.KWH), 0.9348, rel_tol=1e-6)
+
+    # ----------------------------------
+    def test_example_5(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_5.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 6, 1), 58.0, QuantityUnit.KWH), 6.119195, rel_tol=1e-6
+        )
+
+        # Before the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 4, 1), 58.0, QuantityUnit.KWH), 6.119195, rel_tol=1e-6
+        )
+
+        # After the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 8, 1), 58.0, QuantityUnit.KWH), 6.119195, rel_tol=1e-6
+        )
+
+    # ----------------------------------
+    def test_example_6(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_6.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 6, 1), 372.0, QuantityUnit.KWH), 34.87393, rel_tol=1e-6
+        )
+
+        # Before the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 4, 1), 372.0, QuantityUnit.KWH), 34.87393, rel_tol=1e-6
+        )
+
+        # After the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 8, 1), 372.0, QuantityUnit.KWH), 34.87393, rel_tol=1e-6
+        )
+
+    # ----------------------------------
+    def test_example_7(self):
+
+        # Load configuration
+        config = Configuration.load("tests/config/example_7.yaml", "tests/config/secrets.yaml")
+
+        # Build the pricer
+        pricer = Pricer(config.pricing)
+
+        # At the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 6, 1), 1476.0, QuantityUnit.KWH), 152.8014, rel_tol=1e-6
+        )
+
+        # Before the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 4, 1), 1476.0, QuantityUnit.KWH), 152.8014, rel_tol=1e-6
+        )
+
+        # After the date.
+        assert math.isclose(
+            self._compute_cost(pricer, date(2023, 8, 1), 1476.0, QuantityUnit.KWH), 152.8014, rel_tol=1e-6
+        )
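
Editor's note: the tests above exercise the `Pricer` surface end to end (unit conversion, VAT lookup, consumption price arrays, cost computation). For readers who want to drive the same API outside pytest, here is a minimal sketch that chains the calls exactly as the tests do. The import paths are assumptions about the package layout (they are not visible in this patch) and may need adjusting; every call and keyword argument mirrors what appears in the tests.

```python
from datetime import date

# Assumed module paths; adjust to the actual package layout.
from gazpar2haws.configuration import Configuration
from gazpar2haws.model import (
    ConsumptionQuantityArray,
    DateArray,
    PriceUnit,
    QuantityUnit,
    TimeUnit,
)
from gazpar2haws.pricer import Pricer

# Load the same pricing configuration the tests use and build a pricer.
config = Configuration.load("tests/config/configuration.yaml", "tests/config/secrets.yaml")
pricer = Pricer(config.pricing)

# A constant 1 kWh/day consumption profile over a six-day window.
start_date = date(2023, 8, 20)
end_date = date(2023, 8, 25)
quantities = ConsumptionQuantityArray(
    start_date=start_date,
    end_date=end_date,
    value_array=DateArray(start_date=start_date, end_date=end_date, initial_value=1.0),
    value_unit=QuantityUnit.KWH,
    base_unit=TimeUnit.DAY,
)

# Compute the daily cost in euros and inspect the endpoints, as test_compute does.
cost_array = pricer.compute(quantities, PriceUnit.EURO)
for day in (start_date, end_date):
    print(day, cost_array.value_array[day])
```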