diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index bb73193c..855f10ab 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -6,7 +6,13 @@ on:
 jobs:
   ci:
-    runs-on: ubuntu-20.04
+    runs-on: ${{ matrix.os }}
+
+    strategy:
+      max-parallel: 9
+      matrix:
+        os: [ windows-latest, ubuntu-20.04, ubuntu-22.04 ]
+
     steps:
       - name: Checkout
         uses: actions/checkout@v4
@@ -14,44 +20,17 @@ jobs:
       - name: Set up Python
         uses: actions/setup-python@v5
         with:
-          python-version: 3.9
-          cache: pip
-          cache-dependency-path: |
-            requirements.txt
-            requirements-dev.txt
+          python-version: 3.11
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip
-          pip install -r requirements.txt -r requirements-dev.txt
-
-      - name: Check formatting
-        run: ruff format --check src tests
-
-      - name: Check typing
-        run: |
-          python -m mypy
-
-      - name: Test
-        run: |
-          pytest --cov src --cov-report xml tests/antares
+          pip install tox~=4.21.2
+          pip install tox-uv~=1.11.3
 
-      - name: Archive code coverage results
-        uses: actions/upload-artifact@v4
-        with:
-          name: python-code-coverage-report
-          path: coverage.xml
+      - name: Perform Ubuntu tests
+        if: matrix.os != 'windows-latest'
+        run: tox -p
 
-  sonarcloud:
-    runs-on: ubuntu-20.04
-    needs: [ci]
-    steps:
-      - uses: actions/checkout@v4
-      - name: Download python coverage report
-        uses: actions/download-artifact@v4
-        with:
-          name: python-code-coverage-report
-      - name: SonarCloud Scan
-        uses: sonarsource/sonarcloud-github-action@v2.3.0
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
+      - name: Perform Windows tests
+        if: matrix.os == 'windows-latest'
+        run: tox -e 3.9-test
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
new file mode 100644
index 00000000..d8b8fac7
--- /dev/null
+++ b/.github/workflows/coverage.yml
@@ -0,0 +1,48 @@
+name: Coverage
+on:
+  push:
+    branches:
+      - "**"
+
+jobs:
+  coverage:
+    runs-on: ubuntu-22.04
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.11
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox~=4.21.2
+          pip install tox-uv~=1.11.3
+
+      - name: Perform coverage
+        run: tox -e coverage
+
+      - name: Archive code coverage results
+        uses: actions/upload-artifact@v4
+        with:
+          name: python-code-coverage-report
+          path: coverage.xml
+
+  sonarcloud:
+    runs-on: ubuntu-22.04
+    needs: [coverage]
+
+    steps:
+      - uses: actions/checkout@v4
+      - name: Download python coverage report
+        uses: actions/download-artifact@v4
+        with:
+          name: python-code-coverage-report
+      - name: SonarCloud Scan
+        uses: sonarsource/sonarcloud-github-action@v2.3.0
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
diff --git a/.github/workflows/license_header.yml b/.github/workflows/license_header.yml
deleted file mode 100644
index e1dd3b97..00000000
--- a/.github/workflows/license_header.yml
+++ /dev/null
@@ -1,25 +0,0 @@
-name: check license headers
-on:
-  push:
-    branches:
-      - "**"
-
-jobs:
-  check-license-headers:
-    runs-on: ubuntu-20.04
-    steps:
-      - name: Checkout github repo (+ download lfs dependencies)
-        uses: actions/checkout@v4
-      - name: Set up Python
-        uses: actions/setup-python@v5
-        with:
-          python-version: 3.9
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install click
-      - name: Check licenses header
-        run: |
-          python license_checker_and_adder.py --path=../src/ --action=check-strict
-          python license_checker_and_adder.py --path=../tests/ --action=check-strict
-        working-directory: scripts
diff --git a/src/antares/model/load.py b/src/antares/model/load.py
index 49a681ce..45df850f 100644
--- a/src/antares/model/load.py
+++ b/src/antares/model/load.py
@@ -9,32 +9,10 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-from pathlib import Path
-from typing import Optional
-
-import pandas as pd
-
-from antares.tools.prepro_folder import PreproFolder
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile
+from antares.tools.time_series_tool import TimeSeries
 
 
-class Load:
-    def __init__(
-        self,
-        time_series: pd.DataFrame = pd.DataFrame([]),
-        local_file: Optional[TimeSeriesFile] = None,
-        study_path: Optional[Path] = None,
-        area_id: Optional[str] = None,
-    ) -> None:
-        self._time_series = TimeSeries(time_series, local_file)
-        self._prepro = (
-            PreproFolder(folder="load", study_path=study_path, area_id=area_id) if study_path and area_id else None
-        )
-
-    @property
-    def time_series(self) -> TimeSeries:
-        return self._time_series
-
-    @property
-    def prepro(self) -> Optional[PreproFolder]:
-        return self._prepro
+class Load(TimeSeries):
+    pass
diff --git a/src/antares/model/solar.py b/src/antares/model/solar.py
index fece9ea5..57aca133 100644
--- a/src/antares/model/solar.py
+++ b/src/antares/model/solar.py
@@ -9,32 +9,10 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-from pathlib import Path
-from typing import Optional
-
-import pandas as pd
-
-from antares.tools.prepro_folder import PreproFolder
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile
+from antares.tools.time_series_tool import TimeSeries
 
 
-class Solar:
-    def __init__(
-        self,
-        time_series: pd.DataFrame = pd.DataFrame([]),
-        local_file: Optional[TimeSeriesFile] = None,
-        study_path: Optional[Path] = None,
-        area_id: Optional[str] = None,
-    ) -> None:
-        self._time_series = TimeSeries(time_series, local_file)
-        self._prepro = (
-            PreproFolder(folder="solar", study_path=study_path, area_id=area_id) if study_path and area_id else None
-        )
-
-    @property
-    def time_series(self) -> TimeSeries:
-        return self._time_series
-
-    @property
-    def prepro(self) -> Optional[PreproFolder]:
-        return self._prepro
+class Solar(TimeSeries):
+    pass
diff --git a/src/antares/model/study.py b/src/antares/model/study.py
index cce4ed40..50d019b4 100644
--- a/src/antares/model/study.py
+++ b/src/antares/model/study.py
@@ -269,8 +269,8 @@ def delete(self, children: bool = False) -> None:
 
 
 def _verify_study_already_exists(study_directory: Path) -> None:
-    if os.path.exists(study_directory):
-        raise FileExistsError(f"Study {study_directory} already exists.")
+    if study_directory.exists():
+        raise FileExistsError(f"Study {study_directory.name} already exists.")
 
 
 def _create_directory_structure(study_path: Path) -> None:
diff --git a/src/antares/model/wind.py b/src/antares/model/wind.py
index ee02fab6..6ccd6a81 100644
--- a/src/antares/model/wind.py
+++ b/src/antares/model/wind.py
@@ -9,32 +9,10 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-from pathlib import Path
-from typing import Optional
-
-import pandas as pd
-
-from antares.tools.prepro_folder import PreproFolder
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile
+from antares.tools.time_series_tool import TimeSeries
 
 
-class Wind:
-    def __init__(
-        self,
-        time_series: pd.DataFrame = pd.DataFrame([]),
-        local_file: Optional[TimeSeriesFile] = None,
-        study_path: Optional[Path] = None,
-        area_id: Optional[str] = None,
-    ) -> None:
-        self._time_series = TimeSeries(time_series, local_file)
-        self._prepro = (
-            PreproFolder(folder="wind", study_path=study_path, area_id=area_id) if study_path and area_id else None
-        )
-
-    @property
-    def time_series(self) -> TimeSeries:
-        return self._time_series
-
-    @property
-    def prepro(self) -> Optional[PreproFolder]:
-        return self._prepro
+class Wind(TimeSeries):
+    pass
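After this change, Load, Solar and Wind are thin TimeSeries subclasses: the prepro folder and the on-disk matrices are written by the local service at creation time instead of being carried on the model objects. A minimal sketch of the resulting model API (the DataFrame content is illustrative only):

    import pandas as pd

    from antares.model.load import Load

    load = Load(time_series=pd.DataFrame([[1.0], [2.0]]))
    print(load.time_series)  # plain DataFrame access, inherited from TimeSeries
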
diff --git a/src/antares/service/local_services/area_local.py b/src/antares/service/local_services/area_local.py
index cda51143..efcd5a21 100644
--- a/src/antares/service/local_services/area_local.py
+++ b/src/antares/service/local_services/area_local.py
@@ -37,7 +37,8 @@
     BaseThermalService,
 )
 from antares.tools.ini_tool import IniFile, IniFileTypes
-from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType
+from antares.tools.prepro_folder import PreproFolder
+from antares.tools.time_series_tool import TimeSeriesFileType
 
 
 def _sets_ini_content() -> ConfigParser:
@@ -124,10 +125,13 @@ def create_renewable_cluster(
 
     def create_load(self, area: Area, series: Optional[pd.DataFrame]) -> Load:
         series = series if series is not None else pd.DataFrame([])
-        local_file = TimeSeriesFile(
-            TimeSeriesFileType.LOAD, self.config.study_path, area_id=area.id, time_series=series
-        )
-        return Load(time_series=series, local_file=local_file, study_path=self.config.study_path, area_id=area.id)
+        self._write_timeseries(series, TimeSeriesFileType.LOAD, area.id)
+        PreproFolder.LOAD.save(self.config.study_path, area.id)
+        return Load(time_series=series)
+
+    def _write_timeseries(self, series: pd.DataFrame, ts_file_type: TimeSeriesFileType, area_id: str) -> None:
+        file_path = self.config.study_path.joinpath(ts_file_type.value.format(area_id=area_id))
+        series.to_csv(file_path, sep="\t", header=False, index=False, encoding="utf-8")
 
     def create_st_storage(
         self, area_id: str, st_storage_name: str, properties: Optional[STStorageProperties] = None
@@ -149,31 +153,25 @@ def create_st_storage(
 
     def create_wind(self, area: Area, series: Optional[pd.DataFrame]) -> Wind:
         series = series if series is not None else pd.DataFrame([])
-        local_file = TimeSeriesFile(
-            TimeSeriesFileType.WIND, self.config.study_path, area_id=area.id, time_series=series
-        )
-        return Wind(time_series=series, local_file=local_file, study_path=self.config.study_path, area_id=area.id)
+        self._write_timeseries(series, TimeSeriesFileType.WIND, area.id)
+        PreproFolder.WIND.save(self.config.study_path, area.id)
+        return Wind(time_series=series)
 
     def create_reserves(self, area: Area, series: Optional[pd.DataFrame]) -> Reserves:
         series = series if series is not None else pd.DataFrame([])
-        local_file = TimeSeriesFile(
-            TimeSeriesFileType.RESERVES, self.config.study_path, area_id=area.id, time_series=series
-        )
-        return Reserves(series, local_file)
+        self._write_timeseries(series, TimeSeriesFileType.RESERVES, area.id)
+        return Reserves(series)
 
     def create_solar(self, area: Area, series: Optional[pd.DataFrame]) -> Solar:
         series = series if series is not None else pd.DataFrame([])
-        local_file = TimeSeriesFile(
-            TimeSeriesFileType.SOLAR, self.config.study_path, area_id=area.id, time_series=series
-        )
-        return Solar(time_series=series, local_file=local_file, study_path=self.config.study_path, area_id=area.id)
+        self._write_timeseries(series, TimeSeriesFileType.SOLAR, area.id)
+        PreproFolder.SOLAR.save(self.config.study_path, area.id)
+        return Solar(time_series=series)
 
     def create_misc_gen(self, area: Area, series: Optional[pd.DataFrame]) -> MiscGen:
         series = series if series is not None else pd.DataFrame([])
-        local_file = TimeSeriesFile(
-            TimeSeriesFileType.MISC_GEN, self.config.study_path, area_id=area.id, time_series=series
-        )
-        return MiscGen(series, local_file)
+        self._write_timeseries(series, TimeSeriesFileType.MISC_GEN, area.id)
+        return MiscGen(series)
 
     def create_hydro(
         self,
diff --git a/src/antares/service/local_services/binding_constraint_local.py b/src/antares/service/local_services/binding_constraint_local.py
index e0b99e23..0254b25c 100644
--- a/src/antares/service/local_services/binding_constraint_local.py
+++ b/src/antares/service/local_services/binding_constraint_local.py
@@ -26,7 +26,8 @@
 )
 from antares.service.base_services import BaseBindingConstraintService
 from antares.tools.ini_tool import IniFile, IniFileTypes
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile, TimeSeriesFileType
+from antares.tools.matrix_tool import df_save
+from antares.tools.time_series_tool import TimeSeriesFileType
 
 
 class BindingConstraintLocalService(BaseBindingConstraintService):
@@ -35,7 +36,6 @@ def __init__(self, config: LocalConfiguration, study_name: str, **kwargs: Any) -
         self.config = config
         self.study_name = study_name
         self.ini_file = IniFile(self.config.study_path, IniFileTypes.BINDING_CONSTRAINTS_INI)
-        self._time_series: dict[str, TimeSeries] = {}
         self.binding_constraints = {}
 
     def create_binding_constraint(
@@ -90,10 +90,8 @@ def _store_time_series(
             file_types = [TimeSeriesFileType.BINDING_CONSTRAINT_EQUAL]
 
         for ts, ts_id, file_type in zip(time_series, time_series_ids, file_types):
-            self._time_series[ts_id] = TimeSeries(
-                ts,
-                TimeSeriesFile(file_type, self.config.study_path, constraint_id=constraint.id.lower(), time_series=ts),
-            )
+            matrix_path = self.config.study_path.joinpath(file_type.value.format(constraint_id=constraint.id))
+            df_save(ts, matrix_path)
 
     @staticmethod
     def _check_if_empty_ts(time_step: BindingConstraintFrequency, time_series: Optional[pd.DataFrame]) -> pd.DataFrame:
@@ -108,10 +106,6 @@ def _write_binding_constraint_ini(self) -> None:
         self.ini_file.ini_dict = binding_constraints_ini_content
         self.ini_file.write_ini_file()
 
-    @property
-    def time_series(self) -> dict[str, TimeSeries]:
-        return self._time_series
-
    def add_constraint_terms(self, constraint: BindingConstraint, terms: list[ConstraintTerm]) -> list[ConstraintTerm]:
         new_terms = constraint.local_properties.terms | {
             term.id: term for term in terms if term.id not in constraint.get_terms()
diff --git a/src/antares/tools/ini_tool.py b/src/antares/tools/ini_tool.py
index 4fde8689..4f3d418e 100644
--- a/src/antares/tools/ini_tool.py
+++ b/src/antares/tools/ini_tool.py
@@ -13,7 +13,7 @@
 
 from enum import Enum
 from pathlib import Path
-from typing import Any, Optional, Union, overload
+from typing import Any, Optional, Union
 
 from pydantic import BaseModel
 
@@ -105,12 +105,6 @@ def ini_path(self) -> Path:
         """Ini path"""
         return self._full_path
 
-    @overload
-    def add_section(self, section: Path) -> None: ...
-
-    @overload
-    def add_section(self, section: dict[str, dict[str, str]]) -> None: ...
-
     def add_section(self, section: Any) -> None:
         if isinstance(section, dict):
             self._ini_contents.read_dict(section)
@@ -118,7 +112,7 @@ def add_section(self, section: Any) -> None:
             with section.open() as ini_file:
                 self._ini_contents.read_file(ini_file)
         else:
-            raise TypeError("Only dict or Path are allowed")
+            raise TypeError(f"Only dict or Path are allowed, received {type(section)}")
 
     def update_from_ini_file(self) -> None:
         if not self._full_path.is_file():
diff --git a/src/antares/tools/matrix_tool.py b/src/antares/tools/matrix_tool.py
index 897764fe..e4753051 100644
--- a/src/antares/tools/matrix_tool.py
+++ b/src/antares/tools/matrix_tool.py
@@ -9,7 +9,7 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-
+from pathlib import Path
 from typing import Dict
 
 import pandas as pd
 
@@ -28,3 +28,11 @@ def prepare_args_replace_matrix(series: pd.DataFrame, series_path: str) -> Dict:
     matrix = series.to_numpy().tolist()
     body = {"target": series_path, "matrix": matrix}
     return {"action": "replace_matrix", "args": body}
+
+
+def df_save(df: pd.DataFrame, path: Path) -> None:
+    df.to_csv(path, sep="\t", header=False, index=False, encoding="utf-8")
+
+
+def df_read(path: Path) -> pd.DataFrame:
+    return pd.read_csv(path, sep="\t", header=None)
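The new matrix_tool helpers pin down the on-disk matrix convention used throughout: tab-separated, no header, no index, UTF-8. A quick round-trip sketch; the file location is hypothetical and its parent directory must already exist:

    from pathlib import Path

    import pandas as pd

    from antares.tools.matrix_tool import df_read, df_save

    df = pd.DataFrame([[1.0, 2.0], [3.0, 4.0]])
    path = Path("some_study") / "matrix.txt"  # hypothetical target
    df_save(df, path)
    assert df_read(path).equals(df)  # float frames survive the round trip unchanged
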
diff --git a/src/antares/tools/prepro_folder.py b/src/antares/tools/prepro_folder.py
index 79df4d19..4cb48af1 100644
--- a/src/antares/tools/prepro_folder.py
+++ b/src/antares/tools/prepro_folder.py
@@ -9,88 +9,42 @@
 # SPDX-License-Identifier: MPL-2.0
 #
 # This file is part of the Antares project.
-
+from enum import Enum
 from pathlib import Path
 
 import numpy as np
 import pandas as pd
 
 from antares.tools.ini_tool import IniFile, IniFileTypes
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile, TimeSeriesFileType
-
-
-class PreproFolder:
-    def __init__(self, folder: str, study_path: Path, area_id: str) -> None:
-        folders = ["load", "solar", "wind"]
-        if folder not in folders:
-            raise ValueError(f"Folder must be one of the following: {', '.join(folders[:-1])}, and {folders[-1]}")
-        if folder == "solar":
-            settings = IniFileTypes.SOLAR_SETTINGS_INI
-            conversion = TimeSeriesFileType.SOLAR_CONVERSION
-            data = TimeSeriesFileType.SOLAR_DATA
-            k = TimeSeriesFileType.SOLAR_K
-            translation = TimeSeriesFileType.SOLAR_TRANSLATION
-        elif folder == "wind":
-            settings = IniFileTypes.WIND_SETTINGS_INI
-            conversion = TimeSeriesFileType.WIND_CONVERSION
-            data = TimeSeriesFileType.WIND_DATA
-            k = TimeSeriesFileType.WIND_K
-            translation = TimeSeriesFileType.WIND_TRANSLATION
-        elif folder == "load":
-            settings = IniFileTypes.LOAD_SETTINGS_INI
-            conversion = TimeSeriesFileType.LOAD_CONVERSION
-            data = TimeSeriesFileType.LOAD_DATA
-            k = TimeSeriesFileType.LOAD_K
-            translation = TimeSeriesFileType.LOAD_TRANSLATION
-
-        self._settings = IniFile(study_path, settings, area_id)
-        self._conversion = TimeSeries(
-            ConversionFile().data,
-            TimeSeriesFile(conversion, study_path, area_id=area_id, time_series=ConversionFile().data),
-        )
-        self._data = TimeSeries(
-            DataFile().data, TimeSeriesFile(data, study_path, area_id=area_id, time_series=DataFile().data)
-        )
-        self._k = TimeSeries(
-            pd.DataFrame([]), TimeSeriesFile(k, study_path, area_id=area_id, time_series=pd.DataFrame([]))
-        )
-        self._translation = TimeSeries(
-            pd.DataFrame([]),
-            TimeSeriesFile(translation, study_path, area_id=area_id, time_series=pd.DataFrame([])),
-        )
-
-    @property
-    def settings(self) -> IniFile:
-        return self._settings
-
-    @property
-    def conversion(self) -> TimeSeries:
-        return self._conversion
-
-    @property
-    def data(self) -> TimeSeries:
-        return self._data
-
-    @property
-    def k(self) -> TimeSeries:
-        return self._k
-
-    @property
-    def translation(self) -> TimeSeries:
-        return self._translation
-
-
-class ConversionFile:
-    def __init__(self) -> None:
-        self.data = pd.DataFrame([[-9999999980506447872, 0, 9999999980506447872], [0, 0, 0]])
-
-
-class DataFile:
-    def __init__(self) -> None:
-        default_data = pd.DataFrame(np.ones([12, 6]))
-        default_data[2] = 0
-        self._data = default_data.astype(int)
-
-    @property
-    def data(self) -> pd.DataFrame:
-        return self._data
+from antares.tools.matrix_tool import df_save
+from antares.tools.time_series_tool import TimeSeriesFileType
+
+
+class PreproFolder(Enum):
+    LOAD = "load"
+    SOLAR = "solar"
+    WIND = "wind"
+
+    def save(self, study_path: Path, area_id: str) -> None:
+        IniFile(study_path, IniFileTypes[f"{self.value.upper()}_SETTINGS_INI"], area_id)
+
+        conversion = TimeSeriesFileType[f"{self.value.upper()}_CONVERSION"].value.format(area_id=area_id)
+        conversion_path = study_path.joinpath(conversion)
+        conversion_matrix = pd.DataFrame([[-9999999980506447872, 0, 9999999980506447872], [0, 0, 0]])
+        df_save(conversion_matrix, conversion_path)
+
+        data = TimeSeriesFileType[f"{self.value.upper()}_DATA"].value.format(area_id=area_id)
+        data_matrix = pd.DataFrame(np.ones([12, 6]), dtype=int)
+        data_matrix[2] = 0
+        data_path = study_path.joinpath(data)
+        df_save(data_matrix, data_path)
+
+        k = TimeSeriesFileType[f"{self.value.upper()}_K"].value.format(area_id=area_id)
+        k_path = study_path.joinpath(k)
+        k_matrix = pd.DataFrame([])
+        df_save(k_matrix, k_path)
+
+        translation = TimeSeriesFileType[f"{self.value.upper()}_TRANSLATION"].value.format(area_id=area_id)
+        translation_path = study_path.joinpath(translation)
+        translation_matrix = pd.DataFrame([])
+        df_save(translation_matrix, translation_path)
diff --git a/src/antares/tools/time_series_tool.py b/src/antares/tools/time_series_tool.py
index 74682a67..10dde334 100644
--- a/src/antares/tools/time_series_tool.py
+++ b/src/antares/tools/time_series_tool.py
@@ -11,8 +11,6 @@
 # This file is part of the Antares project.
 
 from enum import Enum
-from pathlib import Path
-from typing import Optional
 
 import pandas as pd
 
@@ -52,95 +50,17 @@ class TimeSeriesFileType(Enum):
     WIND_TRANSLATION = "input/wind/prepro/{area_id}/translation.txt"
 
 
-class TimeSeriesFile:
-    """
-    Handling time series files reading and writing locally.
-
-    Time series are stored without headers in tab separated files, encoded with UTF-8.
-
-    Args:
-        ts_file_type: Type of time series file using the class TimeSeriesFileType.
-        study_path: `Path` to the study directory.
-        area_id: Area ID for file paths that use the area's id in their path
-        constraint_id: Constraint ID for file paths that use the binding constraint's id in their path
-        time_series: The actual timeseries as a pandas DataFrame.
-
-    Raises:
-        ValueError if the TimeSeriesFileType needs an area_id and none is provided.
-    """
-
-    def __init__(
-        self,
-        ts_file_type: TimeSeriesFileType,
-        study_path: Path,
-        *,
-        area_id: Optional[str] = None,
-        constraint_id: Optional[str] = None,
-        time_series: Optional[pd.DataFrame] = None,
-    ) -> None:
-        if "{area_id}" in ts_file_type.value and area_id is None:
-            raise ValueError("area_id is required for this file type.")
-        if "{constraint_id}" in ts_file_type.value and constraint_id is None:
-            raise ValueError("constraint_id is required for this file type.")
-
-        self.file_path = study_path / (
-            ts_file_type.value
-            if not (area_id or constraint_id)
-            else ts_file_type.value.format(area_id=area_id, constraint_id=constraint_id)
-        )
-
-        if self.file_path.is_file() and time_series is not None:
-            raise ValueError(f"File {self.file_path} already exists and a time series was provided.")
-        elif self.file_path.is_file() and time_series is None:
-            self._time_series = pd.read_csv(self.file_path, sep="\t", header=None, index_col=None, encoding="utf-8")
-        else:
-            self._time_series = time_series if time_series is not None else pd.DataFrame([])
-            self._write_file()
-
-    @property
-    def time_series(self) -> pd.DataFrame:
-        return self._time_series
-
-    @time_series.setter
-    def time_series(self, time_series: pd.DataFrame) -> None:
-        self._time_series = time_series
-        self._write_file()
-
-    def _write_file(self) -> None:
-        self.file_path.parent.mkdir(parents=True, exist_ok=True)
-        self._time_series.to_csv(self.file_path, sep="\t", header=False, index=False, encoding="utf-8")
-
-
 class TimeSeries:
     """
     A time series for use in Antares
 
     Args:
         time_series: Pandas DataFrame containing the time series.
-        local_file: TimeSeriesFile to store the time series if the study is local.
     """
 
-    def __init__(
-        self, time_series: pd.DataFrame = pd.DataFrame([]), local_file: Optional[TimeSeriesFile] = None
-    ) -> None:
+    def __init__(self, time_series: pd.DataFrame = pd.DataFrame([])) -> None:
         self._time_series = time_series
-        self._local_file = local_file
 
     @property
     def time_series(self) -> pd.DataFrame:
         return self._time_series
-
-    @time_series.setter
-    def time_series(self, time_series: pd.DataFrame) -> None:
-        self._time_series = time_series
-        if self._local_file is not None:
-            self._local_file.time_series = time_series
-
-    @property
-    def local_file(self) -> Optional[TimeSeriesFile]:
-        return self._local_file
-
-    @local_file.setter
-    def local_file(self, local_file: TimeSeriesFile) -> None:
-        self._local_file = local_file
-        self._time_series = local_file.time_series
diff --git a/tests/antares/services/local_services/test_area.py b/tests/antares/services/local_services/test_area.py
index f2f2bf5b..d200482a 100644
--- a/tests/antares/services/local_services/test_area.py
+++ b/tests/antares/services/local_services/test_area.py
@@ -10,12 +10,16 @@
 #
 # This file is part of the Antares project.
 
+import typing as t
+
 from configparser import ConfigParser
 from io import StringIO
+from pathlib import Path
 
 import numpy as np
 import pandas as pd
 
+from antares.config.local_configuration import LocalConfiguration
 from antares.model.hydro import Hydro
 from antares.model.renewable import (
     RenewableCluster,
@@ -754,7 +758,6 @@ def test_settings_ini_exists(self, area_fr, fr_wind):
         # Then
         assert expected_ini_path.exists()
         assert expected_ini_path.is_file()
-        assert expected_ini_path == fr_wind.prepro.settings.ini_path
 
     def test_conversion_txt_exists(self, area_fr, fr_wind):
         # Given
@@ -765,9 +768,8 @@ def test_conversion_txt_exists(self, area_fr, fr_wind):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_wind.prepro.conversion.local_file.file_path == expected_file_path
 
-    def test_conversion_txt_has_correct_default_values(self, area_fr, fr_wind):
+    def test_conversion_txt_has_correct_default_values(self, local_study, fr_wind):
         # Given
         expected_file_contents = """-9999999980506447872\t0\t9999999980506447872
 0\t0\t0
 """
@@ -776,13 +778,13 @@ def test_conversion_txt_has_correct_default_values(self, area_fr, fr_wind):
         expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None).astype(str)
 
         # When
-        with fr_wind.prepro.conversion.local_file.file_path.open("r") as fr_wind_file:
-            actual_file_contents = fr_wind_file.read()
-        actual_file_data = fr_wind.prepro.conversion.time_series.astype(str)
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "wind" / "prepro" / "fr" / "conversion.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=str)
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        assert actual_data.equals(expected_file_data)
 
     def test_data_txt_exists(self, area_fr, fr_wind):
         # Given
@@ -793,33 +795,19 @@ def test_data_txt_exists(self, area_fr, fr_wind):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_wind.prepro.data.local_file.file_path == expected_file_path
 
-    def test_data_txt_has_correct_default_values(self, area_fr, fr_wind):
+    def test_data_txt_has_correct_default_values(self, local_study, fr_wind):
         # Given
-        expected_file_contents = """1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-"""
-        expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None)
-
-        # When
-        with fr_wind.prepro.data.local_file.file_path.open("r") as fr_wind_file:
-            actual_file_contents = fr_wind_file.read()
-        actual_file_data = fr_wind.prepro.data.time_series
+        expected_file_data = pd.DataFrame(np.ones([12, 6]), dtype=int)
+        expected_file_data[2] = 0
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "wind" / "prepro" / "fr" / "data.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=int)
+        # The equality check fails on Windows, most likely because `int` maps to a platform-dependent
+        # width there (int32 vs int64) and DataFrame.equals also compares dtypes; compare plain values instead.
+        assert actual_data.to_dict() == expected_file_data.to_dict()
 
     def test_k_txt_exists(self, area_fr, fr_wind):
         # Given
@@ -830,18 +818,13 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_wind.prepro.k.local_file.file_path == expected_file_path
 
-    def test_k_txt_is_empty_by_default(self, area_fr, fr_wind):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_wind.prepro.k.local_file.file_path.open("r") as fr_wind_file:
-            actual_file_contents = fr_wind_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
+    def test_k_and_translation_txt_is_empty_by_default(self, local_study, fr_wind):
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        for file in ["k", "translation"]:
+            actual_file_path = study_path.joinpath(Path("input") / "wind" / "prepro" / "fr" / f"{file}.txt")
+            assert actual_file_path.read_text() == ""
 
     def test_translation_txt_exists(self, area_fr, fr_wind):
         # Given
@@ -852,18 +835,6 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_wind.prepro.translation.local_file.file_path == expected_file_path
-
-    def test_translation_txt_is_empty_by_default(self, area_fr, fr_wind):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_wind.prepro.translation.local_file.file_path.open("r") as fr_wind_file:
-            actual_file_contents = fr_wind_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
 
 
 class TestCreateSolar:
@@ -909,7 +880,6 @@ def test_settings_ini_exists(self, area_fr, fr_solar):
         # Then
         assert expected_ini_path.exists()
         assert expected_ini_path.is_file()
-        assert expected_ini_path == fr_solar.prepro.settings.ini_path
 
     def test_conversion_txt_exists(self, area_fr, fr_solar):
         # Given
@@ -920,9 +890,8 @@ def test_conversion_txt_exists(self, area_fr, fr_solar):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_solar.prepro.conversion.local_file.file_path == expected_file_path
 
-    def test_conversion_txt_has_correct_default_values(self, area_fr, fr_solar):
+    def test_conversion_txt_has_correct_default_values(self, local_study, fr_solar):
         # Given
         expected_file_contents = """-9999999980506447872\t0\t9999999980506447872
 0\t0\t0
 """
@@ -931,13 +900,13 @@ def test_conversion_txt_has_correct_default_values(self, area_fr, fr_solar):
         expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None).astype(str)
 
         # When
-        with fr_solar.prepro.conversion.local_file.file_path.open("r") as fr_solar_file:
-            actual_file_contents = fr_solar_file.read()
-        actual_file_data = fr_solar.prepro.conversion.time_series.astype(str)
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "solar" / "prepro" / "fr" / "conversion.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=str)
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        assert actual_data.equals(expected_file_data)
 
     def test_data_txt_exists(self, area_fr, fr_solar):
         # Given
@@ -948,33 +917,19 @@ def test_data_txt_exists(self, area_fr, fr_solar):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_solar.prepro.data.local_file.file_path == expected_file_path
 
-    def test_data_txt_has_correct_default_values(self, area_fr, fr_solar):
+    def test_data_txt_has_correct_default_values(self, local_study, fr_solar):
         # Given
-        expected_file_contents = """1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-"""
-        expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None)
-
-        # When
-        with fr_solar.prepro.data.local_file.file_path.open("r") as fr_solar_file:
-            actual_file_contents = fr_solar_file.read()
-        actual_file_data = fr_solar.prepro.data.time_series
+        expected_file_data = pd.DataFrame(np.ones([12, 6]), dtype=int)
+        expected_file_data[2] = 0
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "solar" / "prepro" / "fr" / "data.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=int)
+        # The equality check fails on Windows, most likely because `int` maps to a platform-dependent
+        # width there (int32 vs int64) and DataFrame.equals also compares dtypes; compare plain values instead.
+        assert actual_data.to_dict() == expected_file_data.to_dict()
 
     def test_k_txt_exists(self, area_fr, fr_solar):
         # Given
@@ -985,18 +940,13 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_solar.prepro.k.local_file.file_path == expected_file_path
 
-    def test_k_txt_is_empty_by_default(self, area_fr, fr_solar):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_solar.prepro.k.local_file.file_path.open("r") as fr_solar_file:
-            actual_file_contents = fr_solar_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
+    def test_k_and_translation_txt_is_empty_by_default(self, local_study, fr_solar):
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        for file in ["k", "translation"]:
+            actual_file_path = study_path.joinpath(Path("input") / "solar" / "prepro" / "fr" / f"{file}.txt")
+            assert actual_file_path.read_text() == ""
 
     def test_translation_txt_exists(self, area_fr, fr_solar):
         # Given
@@ -1008,18 +958,6 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_solar.prepro.translation.local_file.file_path == expected_file_path
-
-    def test_translation_txt_is_empty_by_default(self, area_fr, fr_solar):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_solar.prepro.translation.local_file.file_path.open("r") as fr_solar_file:
-            actual_file_contents = fr_solar_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
 
 
 class TestCreateLoad:
@@ -1065,7 +1003,6 @@ def test_settings_ini_exists(self, area_fr, fr_load):
         # Then
         assert expected_ini_path.exists()
         assert expected_ini_path.is_file()
-        assert expected_ini_path == fr_load.prepro.settings.ini_path
 
     def test_conversion_txt_exists(self, area_fr, fr_load):
         # Given
@@ -1076,9 +1013,8 @@ def test_conversion_txt_exists(self, area_fr, fr_load):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_load.prepro.conversion.local_file.file_path == expected_file_path
 
-    def test_conversion_txt_has_correct_default_values(self, area_fr, fr_load):
+    def test_conversion_txt_has_correct_default_values(self, local_study, fr_load):
         # Given
         expected_file_contents = """-9999999980506447872\t0\t9999999980506447872
 0\t0\t0
 """
@@ -1087,13 +1023,13 @@ def test_conversion_txt_has_correct_default_values(self, area_fr, fr_load):
         expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None).astype(str)
 
         # When
-        with fr_load.prepro.conversion.local_file.file_path.open("r") as fr_load_file:
-            actual_file_contents = fr_load_file.read()
-        actual_file_data = fr_load.prepro.conversion.time_series.astype(str)
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "load" / "prepro" / "fr" / "conversion.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=str)
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        assert actual_data.equals(expected_file_data)
 
     def test_data_txt_exists(self, area_fr, fr_load):
         # Given
@@ -1104,33 +1040,19 @@ def test_data_txt_exists(self, area_fr, fr_load):
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_load.prepro.data.local_file.file_path == expected_file_path
 
-    def test_data_txt_has_correct_default_values(self, area_fr, fr_load):
+    def test_data_txt_has_correct_default_values(self, local_study, fr_load):
         # Given
-        expected_file_contents = """1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-1\t1\t0\t1\t1\t1
-"""
-        expected_file_data = pd.read_csv(StringIO(expected_file_contents), sep="\t", header=None)
-
-        # When
-        with fr_load.prepro.data.local_file.file_path.open("r") as fr_load_file:
-            actual_file_contents = fr_load_file.read()
-        actual_file_data = fr_load.prepro.data.time_series
+        expected_file_data = pd.DataFrame(np.ones([12, 6]), dtype=int)
+        expected_file_data[2] = 0
 
         # Then
-        assert actual_file_data.equals(expected_file_data)
-        assert actual_file_contents == expected_file_contents
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "load" / "prepro" / "fr" / "data.txt")
+        actual_data = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=int)
+        # The equality check fails on Windows, most likely because `int` maps to a platform-dependent
+        # width there (int32 vs int64) and DataFrame.equals also compares dtypes; compare plain values instead.
+        assert actual_data.to_dict() == expected_file_data.to_dict()
 
     def test_k_txt_exists(self, area_fr, fr_load):
         # Given
@@ -1141,18 +1063,6 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_load.prepro.k.local_file.file_path == expected_file_path
-
-    def test_k_txt_is_empty_by_default(self, area_fr, fr_load):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_load.prepro.k.local_file.file_path.open("r") as fr_load_file:
-            actual_file_contents = fr_load_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
 
     def test_translation_txt_exists(self, area_fr, fr_load):
         # Given
@@ -1163,18 +1073,13 @@
         # Then
         assert expected_file_path.exists()
         assert expected_file_path.is_file()
-        assert fr_load.prepro.translation.local_file.file_path == expected_file_path
 
-    def test_translation_txt_is_empty_by_default(self, area_fr, fr_load):
-        # Given
-        expected_file_contents = """"""
-
-        # When
-        with fr_load.prepro.translation.local_file.file_path.open("r") as fr_load_file:
-            actual_file_contents = fr_load_file.read()
-
-        # Then
-        assert actual_file_contents == expected_file_contents
+    def test_k_and_translation_txt_is_empty_by_default(self, local_study, fr_load):
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        for file in ["k", "translation"]:
+            actual_file_path = study_path.joinpath(Path("input") / "load" / "prepro" / "fr" / f"{file}.txt")
+            assert actual_file_path.read_text() == ""
 
 
 class TestReadArea:
diff --git a/tests/antares/services/local_services/test_study.py b/tests/antares/services/local_services/test_study.py
index c0f82f56..445490a0 100644
--- a/tests/antares/services/local_services/test_study.py
+++ b/tests/antares/services/local_services/test_study.py
@@ -15,6 +15,7 @@
 import logging
 import os
 import time
+import typing as t
 
 from configparser import ConfigParser
 from pathlib import Path
@@ -84,7 +85,6 @@
 from antares.service.local_services.st_storage_local import ShortTermStorageLocalService
 from antares.service.local_services.thermal_local import ThermalLocalService
 from antares.tools.ini_tool import IniFileTypes
-from antares.tools.time_series_tool import TimeSeriesFileType
 
 
 class TestCreateStudy:
@@ -154,22 +154,15 @@ def test_study_antares_content(self, monkeypatch, tmp_path):
         # Then
         assert actual_content == antares_content
 
-    def test_verify_study_already_exists_error(self, monkeypatch, tmp_path, caplog):
+    def test_verify_study_already_exists_error(self, tmp_path):
         # Given
         study_name = "studyTest"
         version = "850"
-
-        def mock_verify_study_already_exists(study_directory):
-            raise FileExistsError(f"Failed to create study. Study {study_directory} already exists")
-
-        monkeypatch.setattr("antares.model.study._verify_study_already_exists", mock_verify_study_already_exists)
+        (tmp_path / study_name).mkdir(parents=True, exist_ok=True)
 
         # When
-        with caplog.at_level(logging.ERROR):
-            with pytest.raises(
-                FileExistsError, match=f"Failed to create study. Study {tmp_path}/{study_name} already exists"
-            ):
-                create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name))
+        with pytest.raises(FileExistsError, match=f"Study {study_name} already exists"):
+            create_study_local(study_name, version, LocalConfiguration(tmp_path, study_name))
 
     def test_solar_correlation_ini_exists(self, local_study_with_hydro):
         # Given
@@ -1562,10 +1555,7 @@ def test_area_ui_ini_content(self, tmp_path, local_study):
         # Then
         assert actual_content == ui_ini_content
 
-    def test_create_area_with_custom_error(self, monkeypatch, caplog, local_study):
-        # Given
-        caplog.set_level(logging.INFO)
-
+    def test_create_area_with_custom_error(self, monkeypatch, local_study):
         def mock_error_in_sets_ini():
             raise CustomError("An error occurred while processing area can not be created")
@@ -2327,68 +2317,6 @@
         assert actual_ini_content == expected_ini_contents
 
-    def test_binding_constraint_with_timeseries_stores_ts_file(self, local_study_with_hydro):
-        # Given
-        ts_matrix = pd.DataFrame(np.zeros([365 * 24, 2]))
-
-        # When
-        constraints = {
-            "lesser":
-            # Less than timeseries
-            local_study_with_hydro.create_binding_constraint(
-                name="test constraint - less",
-                properties=BindingConstraintProperties(
-                    operator=BindingConstraintOperator.LESS,
-                ),
-                less_term_matrix=ts_matrix,
-            ),
-            "equal":
-            # Equal timeseries
-            local_study_with_hydro.create_binding_constraint(
-                name="test constraint - equal",
-                properties=BindingConstraintProperties(
-                    operator=BindingConstraintOperator.EQUAL,
-                ),
-                equal_term_matrix=ts_matrix,
-            ),
-            "greater":
-            # Greater than timeseries
-            local_study_with_hydro.create_binding_constraint(
-                name="test constraint - greater",
-                properties=BindingConstraintProperties(
-                    operator=BindingConstraintOperator.GREATER,
-                ),
-                greater_term_matrix=ts_matrix,
-            ),
-            "both":
-            # Greater than timeseries
-            local_study_with_hydro.create_binding_constraint(
-                name="test constraint - both",
-                properties=BindingConstraintProperties(
-                    operator=BindingConstraintOperator.BOTH,
-                ),
-                less_term_matrix=ts_matrix,
-                greater_term_matrix=ts_matrix,
-            ),
-        }
-
-        # Then
-        assert local_study_with_hydro._binding_constraints_service.time_series[
-            f"{constraints['lesser'].id.lower()}_lt"
-        ].local_file.file_path.is_file()
-        assert local_study_with_hydro._binding_constraints_service.time_series[
-            f"{constraints['equal'].id.lower()}_eq"
-        ].local_file.file_path.is_file()
-        assert local_study_with_hydro._binding_constraints_service.time_series[
-            f"{constraints['greater'].id.lower()}_gt"
-        ].local_file.file_path.is_file()
-        assert local_study_with_hydro._binding_constraints_service.time_series[
-            f"{constraints['both'].id.lower()}_lt"
-        ].local_file.file_path.is_file()
-        assert local_study_with_hydro._binding_constraints_service.time_series[
-            f"{constraints['both'].id.lower()}_gt"
-        ].local_file.file_path.is_file()
-
     def test_binding_constraints_have_correct_default_time_series(self, test_constraint, local_study_with_constraint):
         # Given
         expected_time_series_hourly = pd.DataFrame(np.zeros([365 * 24 + 24, 1]))
@@ -2411,74 +2339,50 @@ def test_binding_constraints_have_correct_default_time_series(self, test_constra
                 operator=BindingConstraintOperator.BOTH, time_step=BindingConstraintFrequency.HOURLY
             ),
         )
-        expected_pre_created_ts_file = (
-            local_study_with_constraint.service.config.study_path
-            / TimeSeriesFileType.BINDING_CONSTRAINT_LESS.value.format(constraint_id=test_constraint.id)
-        )
-
-        # When
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            f"{test_constraint.id}_lt"
-        ].local_file.file_path.open("r") as pre_created_file:
-            actual_time_series_pre_created = pd.read_csv(pre_created_file, header=None)
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            "test greater_gt"
-        ].local_file.file_path.open("r") as greater_file:
-            actual_time_series_greater = pd.read_csv(greater_file, header=None)
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            "test equal_eq"
-        ].local_file.file_path.open("r") as equal_file:
-            actual_time_series_equal = pd.read_csv(equal_file, header=None)
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            "test both_gt"
-        ].local_file.file_path.open("r") as both_greater_file:
-            actual_time_series_both_greater = pd.read_csv(both_greater_file, header=None)
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            "test both_lt"
-        ].local_file.file_path.open("r") as both_lesser_file:
-            actual_time_series_both_lesser = pd.read_csv(both_lesser_file, header=None)
 
         # Then
-        # Verify that file names are created correctly
-        assert (
-            local_study_with_constraint._binding_constraints_service.time_series[
-                f"{test_constraint.id}_lt"
-            ].local_file.file_path
-            == expected_pre_created_ts_file
-        )
-        # Verify that default file contents are the correct and expected
-        assert actual_time_series_pre_created.equals(expected_time_series_hourly)
+        local_config = t.cast(LocalConfiguration, local_study_with_constraint.service.config)
+        study_path = local_config.study_path
+
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / "test greater_gt.txt")
+        actual_time_series_greater = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
         assert actual_time_series_greater.equals(expected_time_series_daily_weekly)
+
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / "test equal_eq.txt")
+        actual_time_series_equal = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
         assert actual_time_series_equal.equals(expected_time_series_daily_weekly)
-        assert actual_time_series_both_greater.equals(expected_time_series_hourly)
+
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / f"{test_constraint.id}_lt.txt")
+        actual_time_series_pre_created = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
+        assert actual_time_series_pre_created.equals(expected_time_series_hourly)
+
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / "test both_lt.txt")
+        actual_time_series_both_lesser = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
         assert actual_time_series_both_lesser.equals(expected_time_series_hourly)
 
-    def test_submitted_time_series_is_saved(self, local_study_with_constraint):
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / "test both_gt.txt")
+        actual_time_series_both_greater = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
+        assert actual_time_series_both_greater.equals(expected_time_series_hourly)
+
+    def test_submitted_time_series_is_saved(self, local_study):
         # Given
         expected_time_series = pd.DataFrame(np.ones([3, 1]))
-        local_study_with_constraint.create_binding_constraint(
-            name="test time series",
+        bc_name = "test time series"
+        local_study.create_binding_constraint(
+            name=bc_name,
             properties=BindingConstraintProperties(
                 operator=BindingConstraintOperator.GREATER, time_step=BindingConstraintFrequency.HOURLY
             ),
             greater_term_matrix=expected_time_series,
         )
-        expected_file_contents = """1.0
-1.0
-1.0
-"""
 
         # When
-        with local_study_with_constraint._binding_constraints_service.time_series[
-            "test time series_gt"
-        ].local_file.file_path.open("r") as time_series_file:
-            actual_time_series = pd.read_csv(time_series_file, header=None)
-            time_series_file.seek(0)
-            actual_file_contents = time_series_file.read()
+        local_config = t.cast(LocalConfiguration, local_study.service.config)
+        study_path = local_config.study_path
+        actual_file_path = study_path.joinpath(Path("input") / "bindingconstraints" / f"{bc_name}_gt.txt")
+        actual_time_series = pd.read_csv(actual_file_path, sep="\t", header=None, dtype=float)
 
         # Then
         assert actual_time_series.equals(expected_time_series)
-        assert actual_file_contents == expected_file_contents
 
     def test_updating_binding_constraint_properties_updates_local(self, local_study_with_constraint, test_constraint):
         # Given
diff --git a/tests/antares/tools/conftest.py b/tests/antares/tools/conftest.py
deleted file mode 100644
index ccbb86ae..00000000
--- a/tests/antares/tools/conftest.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright (c) 2024, RTE (https://www.rte-france.com)
-#
-# See AUTHORS.txt
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-# SPDX-License-Identifier: MPL-2.0
-#
-# This file is part of the Antares project.
-
-import pytest
-
-import numpy as np
-import pandas as pd
-
-from antares.tools.time_series_tool import TimeSeriesFile, TimeSeriesFileType
-
-
-@pytest.fixture
-def time_series_data():
-    return pd.DataFrame(np.zeros([2, 3]))
-
-
-@pytest.fixture
-def time_series_file(tmp_path, time_series_data):
-    return TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, area_id="test", time_series=time_series_data)
diff --git a/tests/antares/tools/test_time_series_tool.py b/tests/antares/tools/test_time_series_tool.py
deleted file mode 100644
index e52bf512..00000000
--- a/tests/antares/tools/test_time_series_tool.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright (c) 2024, RTE (https://www.rte-france.com)
-#
-# See AUTHORS.txt
-#
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-#
-# SPDX-License-Identifier: MPL-2.0
-#
-# This file is part of the Antares project.
-
-import pytest
-
-import numpy as np
-import pandas as pd
-
-from antares.tools.time_series_tool import TimeSeries, TimeSeriesFile, TimeSeriesFileType
-
-
-class TestTimeSeries:
-    def test_empty_ts_is_dataframe(self):
-        # Given
-        time_series = TimeSeries()
-
-        assert isinstance(time_series.time_series, pd.DataFrame)
-        assert time_series.time_series.empty
-        assert time_series.time_series.equals(pd.DataFrame([]))
-
-    def test_time_series_can_be_set(self, time_series_data):
-        # Given
-        time_series = TimeSeries()
-        expected_time_series = pd.DataFrame(np.zeros(time_series_data.shape))
-
-        # When
-        time_series.time_series = time_series_data
-
-        # Then
-        assert time_series.time_series.equals(expected_time_series)
-
-    def test_time_series_can_have_file(self, time_series_file):
-        # Given
-        time_series = TimeSeries()
-
-        # When
-        time_series.local_file = time_series_file
-
-        # Then
-        assert time_series.local_file.file_path.is_file()
-
-    def test_time_series_can_update_file(self, time_series_file, time_series_data):
-        # Given
-        time_series = TimeSeries()
-        expected_file_content = pd.DataFrame(np.zeros(time_series_data.shape))
-        update_file_content = pd.DataFrame(np.ones(time_series_data.shape))
-
-        # When
-        time_series.local_file = time_series_file
-
-        # Then
-        assert time_series.time_series.equals(expected_file_content)
-
-        # When
-        time_series.time_series = update_file_content
-
-        # Then
-        actual_file_content = pd.read_csv(
-            time_series.local_file.file_path, sep="\t", header=None, index_col=None, encoding="utf-8"
-        )
-        assert actual_file_content.equals(update_file_content)
-
-
-class TestTimeSeriesFile:
-    def test_time_series_file_can_be_set(self, time_series_file, time_series_data):
-        # Given
-        time_series = TimeSeries()
-
-        # When
-        time_series.local_file = time_series_file
-
-        # Then
-        assert time_series.time_series.equals(time_series_data)
-        assert time_series_file.file_path.is_file()
-        assert time_series.local_file is not None
-
-    def test_time_series_file_time_series_can_be_updated(self, time_series_file, time_series_data):
-        # Given
-        time_series = TimeSeries(pd.DataFrame(np.ones([2, 3])))
-
-        # When
-        time_series_file.time_series = time_series.time_series
-
-        with pytest.raises(AssertionError):
-            assert time_series_file.time_series.equals(time_series_data)
-            # assert time_series.local_file.file_path.is_file()
-        assert time_series_file.time_series.equals(time_series.time_series)
-
-    def test_no_area_provided_gives_error(self, tmp_path, time_series_data):
-        # Given
-        with pytest.raises(ValueError, match="area_id is required for this file type."):
-            TimeSeriesFile(ts_file_type=TimeSeriesFileType.RESERVES, study_path=tmp_path, time_series=time_series_data)
-
-    def test_file_exists_time_series_provided_gives_error(self, tmp_path, time_series_data):
-        # Given
-        time_series = TimeSeries(time_series_data)
-        file_name = TimeSeriesFileType.RESERVES.value.format(area_id="test")
-
-        # When
-        (tmp_path / file_name).parent.mkdir(exist_ok=True, parents=True)
-        time_series.time_series.to_csv(tmp_path / file_name, sep="\t", header=False, index=False, encoding="utf-8")
-
-        # Then
-        with pytest.raises(
-            ValueError, match=f"File {tmp_path / file_name} already exists and a time series was provided."
-        ):
-            TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, area_id="test", time_series=time_series.time_series)
-
-    def test_file_exists_no_time_series_provided(self, tmp_path, time_series_data):
-        # Given
-        time_series = TimeSeries(time_series_data)
-        file_name = tmp_path / TimeSeriesFileType.RESERVES.value.format(area_id="test")
-
-        # When
-        file_name.parent.mkdir(exist_ok=True, parents=True)
-        time_series.time_series.to_csv(file_name, sep="\t", header=False, index=False, encoding="utf-8")
-        time_series_file = TimeSeriesFile(TimeSeriesFileType.RESERVES, tmp_path, area_id="test")
-
-        # Then
-        assert time_series_file.time_series.equals(time_series_data)
diff --git a/tox.ini b/tox.ini
index 6398bed4..dd5616bb 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,17 +1,22 @@
 [tox]
 env_list =
-    py3.{9,10,11,12}-test
+    py3.{9,10,12}-test
     lint
 
 [testenv]
 deps =
     -r requirements-dev.txt
 
-[testenv:py3.{9,10,11,12}-test]
+[testenv:py3.{9,10,12}-test]
 description = run the tests with pytest
 commands =
     pytest tests/antares {posargs}
 
+[testenv:coverage]
+description = run the tests with coverage reporting (the CI runs this environment under Python 3.11)
+commands =
+    pytest --cov src --cov-report xml tests/antares
+
 [testenv:lint]
 description = linting with ruff
 skip_install = True
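With this split, the CI can be reproduced locally using the same pinned tools; the env names come from the tox.ini above:

    pip install tox~=4.21.2 tox-uv~=1.11.3
    tox -p            # parallel test/lint run, as in the Ubuntu CI job
    tox -e coverage   # writes coverage.xml for the SonarCloud scan
    tox -e 3.9-test   # single-interpreter run, as in the Windows CI job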