diff --git a/src/andromede/input_converter/src/converter.py b/src/andromede/input_converter/src/converter.py index 7f171f0e..34976ac5 100644 --- a/src/andromede/input_converter/src/converter.py +++ b/src/andromede/input_converter/src/converter.py @@ -41,7 +41,7 @@ def __init__( Initialize processor """ self.logger = logger - self.period = period if period else 168 + self.period: int = period if period else 168 if isinstance(study_input, Study): self.study = study_input @@ -55,6 +55,7 @@ def __init__( self.output_path = ( Path(output_path) if output_path else self.study_path / Path("output.yaml") ) + self.areas: Iterable[Area] = self.study.get_areas().values() def _check_dataframe_validity(self, df: DataFrame) -> bool: """ @@ -69,13 +70,11 @@ def _check_dataframe_validity(self, df: DataFrame) -> bool: return True - def _convert_area_to_component_list( - self, areas: Iterable[Area], lib_id: str - ) -> list[InputComponent]: + def _convert_area_to_component_list(self, lib_id: str) -> list[InputComponent]: components = [] self.logger.info("Converting areas to component list...") - for area in areas: + for area in self.areas: components.append( InputComponent( id=area.id, @@ -99,12 +98,12 @@ def _convert_area_to_component_list( return components def _convert_renewable_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting renewables to component list...") - for area in areas: + for area in self.areas: renewables = area.get_renewables() for renewable in renewables.values(): series_path = ( @@ -154,14 +153,14 @@ def _convert_renewable_to_component_list( return components, connections def _convert_thermal_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting thermals to component list...") # Add thermal components for each area - for area in areas: + for area in self.areas: thermals = area.get_thermals() for thermal in thermals.values(): series_path = ( @@ -179,11 +178,15 @@ def _convert_thermal_to_component_list( id=thermal.id, model=f"{lib_id}.thermal", parameters=[ - tdp.process_p_min_cluster(), - tdp.process_nb_units_min(), - tdp.process_nb_units_max(), - tdp.process_nb_units_max_variation_forward(self.period), - tdp.process_nb_units_max_variation_backward(self.period), + tdp.generate_component_parameter("p_min_cluster"), + tdp.generate_component_parameter("nb_units_min"), + tdp.generate_component_parameter("nb_units_max"), + tdp.generate_component_parameter( + "nb_units_max_variation_forward", self.period + ), + tdp.generate_component_parameter( + "nb_units_max_variation_backward", self.period + ), InputComponentParameter( id="unit_count", time_dependent=False, @@ -259,13 +262,13 @@ def _convert_thermal_to_component_list( return components, connections def _convert_st_storage_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting short-term storages to component list...") # Add thermal components for each area - for area in areas: + for area in self.areas: storages = area.get_st_storages() for storage in storages.values(): series_path = ( @@ -431,12 +434,12 @@ def _convert_link_to_component_list( return components, connections def 
_convert_wind_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting wind to component list...") - for area in areas: + for area in self.areas: series_path = ( self.study_path / "input" / "wind" / "series" / f"wind_{area.id}.txt" ) @@ -468,12 +471,12 @@ def _convert_wind_to_component_list( return components, connections def _convert_solar_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting solar to component list...") - for area in areas: + for area in self.areas: series_path = ( self.study_path / "input" / "solar" / "series" / f"solar_{area.id}.txt" ) @@ -506,12 +509,12 @@ def _convert_solar_to_component_list( return components, connections def _convert_load_to_component_list( - self, areas: Iterable[Area], lib_id: str + self, lib_id: str ) -> tuple[list[InputComponent], list[InputPortConnections]]: components = [] connections = [] self.logger.info("Converting load to component list...") - for area in areas: + for area in self.areas: series_path = ( self.study_path / "input" / "load" / "series" / f"load_{area.id}.txt" ) @@ -544,10 +547,7 @@ def _convert_load_to_component_list( def convert_study_to_input_study(self) -> InputSystem: antares_historic_lib_id = "antares-historic" - areas = self.study.get_areas().values() - area_components = self._convert_area_to_component_list( - areas, antares_historic_lib_id - ) + area_components = self._convert_area_to_component_list(antares_historic_lib_id) list_components: list[InputComponent] = [] list_connections: list[InputPortConnections] = [] @@ -567,7 +567,7 @@ def convert_study_to_input_study(self) -> InputSystem: ] for method in conversion_methods: - components, connections = method(areas, antares_historic_lib_id) + components, connections = method(antares_historic_lib_id) list_components.extend(components) list_connections.extend(connections) diff --git a/src/andromede/input_converter/src/data_preprocessing/thermal.py b/src/andromede/input_converter/src/data_preprocessing/thermal.py index 75acc724..730efc41 100644 --- a/src/andromede/input_converter/src/data_preprocessing/thermal.py +++ b/src/andromede/input_converter/src/data_preprocessing/thermal.py @@ -1,4 +1,6 @@ +from enum import Enum from pathlib import Path +from typing import Callable import numpy as np import pandas as pd @@ -8,11 +10,17 @@ from andromede.study.parsing import InputComponentParameter +class Direction(Enum): + FORWARD = "forward" + BACKWARD = "backward" + + class ThermalDataPreprocessing: + DEFAULT_PERIOD: int = 168 + def __init__(self, thermal: ThermalCluster, study_path: Path): self.thermal = thermal self.study_path = study_path - self.series_path = ( self.study_path / "input" @@ -22,127 +30,89 @@ def __init__(self, thermal: ThermalCluster, study_path: Path): / self.thermal.id ) - def _write_dataframe_to_csv(self, dataframe: pd.DataFrame, filename: str) -> Path: - csv_path = self.series_path / filename - # This separator is chosen to comply with the antares_craft timeseries creation - dataframe.to_csv(csv_path, sep="\t", index=False, header=False) - - return csv_path - def _compute_p_min_cluster(self) -> pd.DataFrame: - modulation_data = self.thermal.get_prepro_modulation_matrix().iloc[:, 3] - series_data = self.thermal.get_series_matrix() - - unit_count = 
self.thermal.properties.unit_count - nominal_capacity = self.thermal.properties.nominal_capacity - modulation_data = modulation_data * nominal_capacity * unit_count - - min_values = pd.concat([modulation_data, series_data], axis=1).min(axis=1) - return min_values.to_frame( - name="p_min_cluster" - ) # Convert from series to dataframe - - def process_p_min_cluster(self) -> InputComponentParameter: - p_min_cluster = self._compute_p_min_cluster() - csv_path = self._write_dataframe_to_csv(p_min_cluster, "p_min_cluster.txt") - - return InputComponentParameter( - id="p_min_cluster", - time_dependent=True, - scenario_dependent=True, - value=str(csv_path).removesuffix(".txt"), + modulation_data: pd.Series = self.thermal.get_prepro_modulation_matrix().iloc[ + :, 3 + ] + series_data: pd.DataFrame = self.thermal.get_series_matrix() + unit_count: int = self.thermal.properties.unit_count + nominal_capacity: float = self.thermal.properties.nominal_capacity + scaled_modulation: pd.Series = modulation_data * nominal_capacity * unit_count + # min(min_gen_modulation * unit_count * nominal_capacity, p_max_cluster) + min_values: pd.Series = pd.concat([scaled_modulation, series_data], axis=1).min( + axis=1 ) + return min_values.to_frame(name="p_min_cluster") def _compute_nb_units_min(self) -> pd.DataFrame: - p_min_cluster = load_ts_from_txt("p_min_cluster", self.series_path) - return pd.DataFrame( - np.ceil(p_min_cluster / self.thermal.properties.nominal_capacity) + p_min_cluster: pd.DataFrame = load_ts_from_txt( + "p_min_cluster", self.series_path ) - - def process_nb_units_min(self) -> InputComponentParameter: - nb_units_min = self._compute_nb_units_min() - csv_path = self._write_dataframe_to_csv(nb_units_min, "nb_units_min.txt") - - return InputComponentParameter( - id="nb_units_min", - time_dependent=True, - scenario_dependent=True, - value=str(csv_path).removesuffix(".txt"), + nominal_capacity: float = self.thermal.properties.nominal_capacity + return pd.DataFrame( + np.ceil(p_min_cluster / nominal_capacity), ) def _compute_nb_units_max(self) -> pd.DataFrame: - series_data = self.thermal.get_series_matrix() - + series_data: pd.DataFrame = self.thermal.get_series_matrix() + nominal_capacity: float = self.thermal.properties.nominal_capacity return pd.DataFrame( - np.ceil(series_data / self.thermal.properties.nominal_capacity) + np.ceil(series_data / nominal_capacity), ) - def process_nb_units_max(self) -> InputComponentParameter: - nb_units_max = self._compute_nb_units_max() - csv_path = self._write_dataframe_to_csv(nb_units_max, "nb_units_max.txt") - - return InputComponentParameter( - id="nb_units_max", - time_dependent=True, - scenario_dependent=True, - value=str(csv_path).removesuffix(".txt"), - ) - - def _compute_nb_units_max_variation_forward( - self, period: int = 168 + def _compute_nb_units_max_variation( + self, direction: Direction, period: int = DEFAULT_PERIOD ) -> pd.DataFrame: - nb_units_max_output = load_ts_from_txt("nb_units_max", self.series_path) + nb_units_max = load_ts_from_txt("nb_units_max", self.series_path) previous_indices = [] - for i in range(len(nb_units_max_output)): - previous_indices.append((i - 1) % period + (i // period) * period) - nb_units_max_output = nb_units_max_output.iloc[previous_indices].reset_index( - drop=True - ) - nb_units_max_output.reset_index(drop=True) - - return nb_units_max_output.applymap(lambda x: max(0, x)) # type: ignore - - def process_nb_units_max_variation_forward( - self, period: int = 168 - ) -> InputComponentParameter: - nb_units_max_variation = 
self._compute_nb_units_max_variation_forward( - period=period - ) - csv_path = self._write_dataframe_to_csv( - nb_units_max_variation, "nb_units_max_variation_forward.txt" - ) - - return InputComponentParameter( - id="nb_units_max_variation_forward", - time_dependent=True, - scenario_dependent=True, - value=str(csv_path).removesuffix(".txt"), + indices = np.arange(len(nb_units_max)) + previous_indices = (indices - 1) % period + (indices // period) * period + + variation = pd.DataFrame() + if direction.value == "backward": + variation = nb_units_max.reset_index(drop=True) - nb_units_max.iloc[ + previous_indices + ].reset_index(drop=True) + elif direction.value == "forward": + variation = nb_units_max.iloc[previous_indices].reset_index( + drop=True + ) - nb_units_max.reset_index(drop=True) + + # Use a vectorized operation instead of applymap + variation = variation.clip(lower=0) + return variation.rename( + columns={variation.columns[0]: f"nb_units_max_variation_{direction.value}"} ) - def _compute_nb_units_max_variation_backward( - self, period: int = 168 - ) -> pd.DataFrame: - nb_units_max_output = load_ts_from_txt("nb_units_max", self.series_path) - previous_indices = [] - for i in range(len(nb_units_max_output)): - previous_indices.append((i - 1) % period + (i // period) * period) - nb_units_max_output = nb_units_max_output.reset_index( - drop=True - ) - nb_units_max_output.iloc[previous_indices].reset_index(drop=True) - - return nb_units_max_output.applymap(lambda x: max(0, x)) # type: ignore + def _build_csv_path(self, component_id: str, suffix: str = ".txt") -> Path: + return self.series_path / Path(f"{component_id}").with_suffix(suffix) - def process_nb_units_max_variation_backward( - self, period: int = 168 + def generate_component_parameter( + self, parameter_id: str, period: int = 0 ) -> InputComponentParameter: - nb_units_max_variation = self._compute_nb_units_max_variation_backward( - period=period - ) - csv_path = self._write_dataframe_to_csv( - nb_units_max_variation, "nb_units_max_variation_backward.txt" - ) + prepro_parameter_function: dict[str, Callable[[], pd.DataFrame]] = { + "p_min_cluster": self._compute_p_min_cluster, + "nb_units_min": self._compute_nb_units_min, + "nb_units_max": self._compute_nb_units_max, + "nb_units_max_variation_forward": lambda: self._compute_nb_units_max_variation( + Direction.FORWARD, period + ), + "nb_units_max_variation_backward": lambda: self._compute_nb_units_max_variation( + Direction.BACKWARD, period + ), + } + + if parameter_id not in prepro_parameter_function: + raise ValueError(f"Unsupported parameter_id: {parameter_id}") + + df = prepro_parameter_function[parameter_id]() + csv_path = self._build_csv_path(parameter_id) + + # This separator is chosen to comply with the antares_craft timeseries creation + df.to_csv(csv_path, sep="\t", index=False, header=False) return InputComponentParameter( - id="nb_units_max_variation_backward", + id=parameter_id, time_dependent=True, scenario_dependent=True, value=str(csv_path).removesuffix(".txt"), diff --git a/tests/antares_historic/conftest.py b/tests/antares_historic/conftest.py index 4ce883e7..b4859359 100644 --- a/tests/antares_historic/conftest.py +++ b/tests/antares_historic/conftest.py @@ -11,6 +11,7 @@ # This file is part of the Antares project.
import pytest from antares.craft.model.area import AreaProperties +from antares.craft.model.st_storage import STStorageProperties from antares.craft.model.study import Study, create_study_local from antares.craft.model.thermal import ( LawOption, @@ -105,3 +106,33 @@ def local_study_end_to_end_w_thermal(local_study, default_thermal_cluster_proper thermal_name, properties=default_thermal_cluster_properties ) return local_study + + +@pytest.fixture +def default_st_storage_cluster_properties() -> STStorageProperties: + return STStorageProperties( + injection_nominal_capacity=10, + withdrawal_nominal_capacity=10, + reservoir_capacity=0, + efficiency=1, + initial_level=0.5, + initial_level_optim=False, + enabled=True, + ) + + +@pytest.fixture +def local_study_with_st_storage( + local_study_end_to_end_w_thermal, default_st_storage_cluster_properties +) -> Study: + """ + Create an empty study + Create an area with custom area properties + Create a thermal cluster with custom thermal properties + Create a short term storage + """ + storage_name = "battery" + local_study_end_to_end_w_thermal.get_areas()["fr"].create_st_storage( + storage_name, properties=default_st_storage_cluster_properties + ) + return local_study_end_to_end_w_thermal diff --git a/tests/antares_historic/test_antares_historic.py b/tests/antares_historic/test_antares_historic.py index 9f9e45e0..d6760623 100644 --- a/tests/antares_historic/test_antares_historic.py +++ b/tests/antares_historic/test_antares_historic.py @@ -1,3 +1,4 @@ +from dataclasses import dataclass from pathlib import Path import pandas as pd @@ -8,6 +9,7 @@ from andromede.model.parsing import InputLibrary, parse_yaml_library from andromede.model.resolve_library import resolve_library from andromede.simulation import TimeBlock, build_problem +from andromede.simulation.optimization import OptimizationProblem from andromede.study.data import load_ts_from_txt from andromede.study.parsing import InputSystem, parse_yaml_components from andromede.study.resolve_components import ( @@ -18,6 +20,12 @@ ) +@dataclass(frozen=True) +class ToolTestStudy: + study_component_data: InputSystem + study_path: Path + + def create_csv_from_constant_value( path, filename: str, lines: int, columns: int = 1, value: float = 1 ) -> None: @@ -124,7 +132,7 @@ def fill_timeseries(study_path) -> None: create_csv_from_constant_value(path=series_path, filename="nb_units_max", lines=3) -def _setup_study_component(study, period=None) -> tuple: +def _setup_study_component(study, period=None) -> ToolTestStudy: """ Helper function to reduce redundancy in study component setup. 
""" @@ -142,19 +150,24 @@ def _setup_study_component(study, period=None) -> tuple: compo_file = converter.output_path with compo_file.open() as c: - return parse_yaml_components(c), study_path + return ToolTestStudy(parse_yaml_components(c), study_path) @pytest.fixture -def study_component_basic(local_study_end_to_end_simple) -> tuple: +def study_component_basic(local_study_end_to_end_simple) -> ToolTestStudy: return _setup_study_component(local_study_end_to_end_simple) @pytest.fixture -def study_component_thermal(local_study_end_to_end_w_thermal) -> tuple: +def study_component_thermal(local_study_end_to_end_w_thermal) -> ToolTestStudy: return _setup_study_component(local_study_end_to_end_w_thermal, period=3) +@pytest.fixture +def study_component_st_storage(local_study_with_st_storage) -> ToolTestStudy: + return _setup_study_component(local_study_with_st_storage, period=3) + + @pytest.fixture def input_library( data_dir: Path, @@ -171,9 +184,9 @@ def input_library( return parse_yaml_library(lib) -def factory_balance_using_converter( - input_system: InputSystem, input_library: InputLibrary, expected_value: int -) -> None: +def problem_builder( + study_test_component: ToolTestStudy, input_library: InputLibrary +) -> OptimizationProblem: """ - Resolves the input library. - Constructs components and connections. @@ -181,8 +194,8 @@ def factory_balance_using_converter( - Builds the database and network. - Solves the optimization problem and verifies results. """ - study_path = input_system[1] - study_component_data = input_system[0] + study_path = study_test_component.study_path + study_component_data = study_test_component.study_component_data result_lib = resolve_library([input_library]) components_input = resolve_system(study_component_data, result_lib) @@ -194,10 +207,7 @@ def factory_balance_using_converter( network = build_network(components_input) scenarios = 1 - problem = build_problem(network, database, TimeBlock(1, [0, 1]), scenarios) - status = problem.solver.Solve() - assert status == problem.solver.OPTIMAL - assert problem.solver.Objective().Value() == expected_value + return build_problem(network, database, TimeBlock(1, [0, 1]), scenarios) def test_basic_balance_using_converter( @@ -206,9 +216,10 @@ def test_basic_balance_using_converter( """ Test basic study balance using the converter. """ - factory_balance_using_converter( - study_component_basic, input_library, expected_value=150 - ) + problem = problem_builder(study_component_basic, input_library) + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 150 def test_thermal_balance_using_converter( @@ -217,6 +228,23 @@ def test_thermal_balance_using_converter( """ Test thermal study balance using the converter. """ - factory_balance_using_converter( - study_component_thermal, input_library, expected_value=165 - ) + problem = problem_builder(study_component_thermal, input_library) + + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 165 + + +@pytest.skip("Pass test for the moment") +def test_storage_balance_using_converter( + study_component_st_storage: InputSystem, input_library: InputLibrary +) -> None: + """ + Test storage study balance using the converter. 
+ """ + # Wait for new version 0.92 of antares craft which include efficiencywithdrawalparameter + problem = problem_builder(study_component_st_storage, input_library) + + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 165 diff --git a/tests/input_converter/conftest.py b/tests/input_converter/conftest.py index dae392a0..ffc77408 100644 --- a/tests/input_converter/conftest.py +++ b/tests/input_converter/conftest.py @@ -35,27 +35,23 @@ def local_study(tmp_path) -> Study: return create_study_local(study_name, study_version, tmp_path.absolute()) -@pytest.fixture -def create_csv_from_constant_value(): - def _create_csv_from_constant_value( - path, filename: str, lines: int, columns: int = 1, value: int = 1 - ) -> None: - path = path / filename - - # Generate the data - data = {f"col_{i+1}": [value] * lines for i in range(columns)} - df = pd.DataFrame(data) - - # Write the data to a file - df.to_csv( - path.with_suffix(".txt"), - sep="\t", - index=False, - header=False, - encoding="utf-8", - ) +def create_dataframe_from_constant( + lines: int, + columns: int = 1, + value: int = 1, +) -> pd.DataFrame: + """ + Creates a DataFrame filled with a constant value for testing. - return _create_csv_from_constant_value + Args: + lines (int): Number of rows in the DataFrame. + columns (int, optional): Number of columns. Defaults to 1. + value (int, optional): Constant value to fill the DataFrame. Defaults to 1. + Returns: + pd.DataFrame: A DataFrame with the specified dimensions, filled with the constant value. + """ + data = {f"col_{i+1}": [value] * lines for i in range(columns)} + return pd.DataFrame(data) @pytest.fixture @@ -90,7 +86,7 @@ def local_study_w_links(local_study_w_areas) -> Study: @pytest.fixture -def local_study_w_thermal(local_study_w_links) -> Study: +def local_study_w_thermal(local_study_w_links: Study, request: pytest.FixtureRequest) -> Study: """ Create an empty study Create 2 areas with custom area properties @@ -98,9 +94,21 @@ def local_study_w_thermal(local_study_w_links) -> Study: Create a thermal cluster """ thermal_name = "gaz" - local_study_w_links.get_areas()["fr"].create_thermal_cluster( - thermal_name, ThermalClusterProperties(unit_count=1, nominal_capacity=2.0) - ) + if hasattr(request, "param"): + modulation_df, series_df = request.param + local_study_w_links.get_areas()["fr"].create_thermal_cluster( + thermal_name, + ThermalClusterProperties(unit_count=1, nominal_capacity=2.0), + prepro=None, + modulation=modulation_df, + series=series_df, + co2_cost=None, + fuel_cost=None, + ) + else: + local_study_w_links.get_areas()["fr"].create_thermal_cluster( + thermal_name, ThermalClusterProperties(unit_count=1, nominal_capacity=2.0) + ) return local_study_w_links @@ -114,7 +122,7 @@ def local_study_with_renewable(local_study_w_thermal) -> Study: Create a renewable cluster """ renewable_cluster_name = "generation" - time_serie = pd.DataFrame( + timeseries = pd.DataFrame( [ [-9999999980506447872, 0, 9999999980506447872], [0, "fr", 0], @@ -122,7 +130,7 @@ def local_study_with_renewable(local_study_w_thermal) -> Study: dtype="object", ) local_study_w_thermal.get_areas()["fr"].create_renewable_cluster( - renewable_cluster_name, RenewableClusterProperties(), series=time_serie + renewable_cluster_name, RenewableClusterProperties(), series=timeseries ) return local_study_w_thermal diff --git a/tests/input_converter/test_converter.py b/tests/input_converter/test_converter.py index 3f342ad4..e81ed982 100644 --- 
a/tests/input_converter/test_converter.py +++ b/tests/input_converter/test_converter.py @@ -10,11 +10,13 @@ # # This file is part of the Antares project. -from typing import Callable, Literal +from typing import Literal import pandas as pd import pytest +from antares.craft.model.area import Area from antares.craft.model.study import Study +from antares.craft.model.thermal import ThermalCluster from andromede.input_converter.src.converter import AntaresStudyConverter from andromede.input_converter.src.data_preprocessing.thermal import ( @@ -29,21 +31,26 @@ InputSystem, parse_yaml_components, ) +from tests.input_converter.conftest import create_dataframe_from_constant + +DATAFRAME_PREPRO_THERMAL_CONFIG = ( + create_dataframe_from_constant(lines=840, columns=4), # modulation + create_dataframe_from_constant(lines=840), # series +) class TestConverter: - def _init_area_reading(self, local_study): + def _init_study_converter(self, local_study): logger = Logger(__name__, local_study.service.config.study_path) - converter = AntaresStudyConverter(study_input=local_study, logger=logger) - areas = converter.study.get_areas().values() - return areas, converter + converter: AntaresStudyConverter = AntaresStudyConverter( + study_input=local_study, logger=logger + ) + return converter def test_convert_study_to_input_study(self, local_study_w_areas: Study): - logger = Logger(__name__, local_study_w_areas.service.config.study_path) - converter = AntaresStudyConverter( - study_input=local_study_w_areas, logger=logger - ) + converter = self._init_study_converter(local_study_w_areas) input_study = converter.convert_study_to_input_study() + expected_input_study = InputSystem( nodes=[ InputComponent( @@ -102,8 +109,8 @@ def test_convert_study_to_input_study(self, local_study_w_areas: Study): assert input_study == expected_input_study def test_convert_area_to_component(self, local_study_w_areas: Study, lib_id: str): - areas, converter = self._init_area_reading(local_study_w_areas) - area_components = converter._convert_area_to_component_list(areas, lib_id) + converter = self._init_study_converter(local_study_w_areas) + area_components = converter._convert_area_to_component_list(lib_id) expected_area_components = [ InputComponent( @@ -155,17 +162,83 @@ def test_convert_area_to_component(self, local_study_w_areas: Study, lib_id: str area_components.sort(key=lambda x: x.id) assert area_components == expected_area_components + def test_convert_area_to_yaml(self, local_study_w_areas: Study, lib_id: str): + converter = self._init_study_converter(local_study_w_areas) + area_components = converter._convert_area_to_component_list(lib_id) + input_study = InputSystem(nodes=area_components) + + # Dump model into yaml file + yaml_path = converter.study_path / "study_path.yaml" + transform_to_yaml(model=input_study, output_path=yaml_path) + + # Open yaml file to validate + with open(yaml_path, "r", encoding="utf-8") as yaml_file: + validated_data = parse_yaml_components(yaml_file) + + expected_validated_data = InputSystem( + nodes=[ + InputComponent( + id="it", + model="antares-historic.area", + scenario_group=None, + parameters=[ + InputComponentParameter( + id="ens_cost", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=0.5, + ), + InputComponentParameter( + id="spillage_cost", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=1.0, + ), + ], + ), + InputComponent( + id="fr", + model="antares-historic.area", + scenario_group=None, + parameters=[ + 
InputComponentParameter( + id="ens_cost", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=0.5, + ), + InputComponentParameter( + id="spillage_cost", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=1.0, + ), + ], + ), + ], + components=[], + connections=[], + ) + + expected_validated_data.nodes.sort(key=lambda x: x.id) + validated_data.nodes.sort(key=lambda x: x.id) + assert validated_data == expected_validated_data + def test_convert_renewables_to_component( self, local_study_with_renewable: Study, lib_id: str ): - areas, converter = self._init_area_reading(local_study_with_renewable) + converter = self._init_study_converter(local_study_with_renewable) study_path = converter.study_path ( renewables_components, renewable_connections, - ) = converter._convert_renewable_to_component_list(areas, lib_id) + ) = converter._convert_renewable_to_component_list(lib_id) - timeserie_path = str( + timeseries_path = str( study_path / "input" / "renewables" @@ -207,7 +280,7 @@ def test_convert_renewables_to_component( time_dependent=True, scenario_dependent=True, scenario_group=None, - value=f"{timeserie_path}", + value=f"{timeseries_path}", ), ], ) @@ -218,12 +291,13 @@ def test_convert_renewables_to_component( def test_convert_st_storages_to_component( self, local_study_with_st_storage, lib_id: str ): - areas, converter = self._init_area_reading(local_study_with_st_storage) + converter = self._init_study_converter(local_study_with_st_storage) study_path = converter.study_path ( storage_components, storage_connections, - ) = converter._convert_st_storage_to_component_list(areas, lib_id) + ) = converter._convert_st_storage_to_component_list(lib_id) + default_path = study_path / "input" / "st-storage" / "series" / "fr" / "battery" inflows_path = default_path / "inflows" lower_rule_curve_path = default_path / "lower-rule-curve" @@ -317,49 +391,33 @@ def test_convert_st_storages_to_component( ], ) ] - print("actual: ", storage_components) - print("epxected: ", expected_storage_component) + assert storage_components == expected_storage_component assert storage_connections == expected_storage_connections + @pytest.mark.parametrize( + "local_study_w_thermal", + [DATAFRAME_PREPRO_THERMAL_CONFIG], + indirect=True, + ) def test_convert_thermals_to_component( self, local_study_w_thermal: Study, - create_csv_from_constant_value: Callable[..., None], lib_id: str, ): - areas, converter = self._init_area_reading(local_study_w_thermal) + converter = self._init_study_converter(local_study_w_thermal) study_path = converter.study_path - # I just want to fill the modulation and series files - modulation_timeseries = ( - study_path / "input" / "thermal" / "prepro" / "fr" / "gaz" - ) - series_path = study_path / "input" / "thermal" / "series" / "fr" / "gaz" - # We have to use a multiple of 168, to match with full weeks - create_csv_from_constant_value(modulation_timeseries, "modulation", 840, 4) - create_csv_from_constant_value(series_path, "series", 840) - - self._generate_tdp_instance_parameter( - areas, study_path, create_dataframes=False - ) ( thermals_components, thermals_connections, - ) = converter._convert_thermal_to_component_list(areas, lib_id) + ) = converter._convert_thermal_to_component_list(lib_id) study_path = converter.study_path - p_max_timeserie = str( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / "series" - ) - p_min_cluster = str( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / "p_min_cluster" - ) - 
nb_units_min = str( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / "nb_units_min" - ) - nb_units_max = str( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / "nb_units_max" - ) + series_path = study_path / "input" / "thermal" / "series" / "fr" / "gaz" + p_max_timeseries = str(series_path / "series") + p_min_cluster = str(series_path / "p_min_cluster") + nb_units_min = str(series_path / "nb_units_min") + nb_units_max = str(series_path / "nb_units_max") nb_units_max_variation_forward = str( study_path / "input" @@ -495,94 +553,27 @@ def test_convert_thermals_to_component( time_dependent=True, scenario_dependent=True, scenario_group=None, - value=f"{p_max_timeserie}", + value=f"{p_max_timeseries}", ), ], ) ] - print("ACTUAL:", thermals_components) - print("EXPECTED:", expected_thermals_components) assert thermals_components == expected_thermals_components assert thermals_connections == expected_thermals_connections - def test_convert_area_to_yaml(self, local_study_w_areas: Study, lib_id: str): - areas, converter = self._init_area_reading(local_study_w_areas) - area_components = converter._convert_area_to_component_list(areas, lib_id) - input_study = InputSystem(nodes=area_components) - - # Dump model into yaml file - yaml_path = converter.study_path / "study_path.yaml" - transform_to_yaml(model=input_study, output_path=yaml_path) - - # Open yaml file to validate - with open(yaml_path, "r", encoding="utf-8") as yaml_file: - validated_data = parse_yaml_components(yaml_file) - - expected_validated_data = InputSystem( - nodes=[ - InputComponent( - id="it", - model="antares-historic.area", - scenario_group=None, - parameters=[ - InputComponentParameter( - id="ens_cost", - time_dependent=False, - scenario_dependent=False, - scenario_group=None, - value=0.5, - ), - InputComponentParameter( - id="spillage_cost", - time_dependent=False, - scenario_dependent=False, - scenario_group=None, - value=1.0, - ), - ], - ), - InputComponent( - id="fr", - model="antares-historic.area", - scenario_group=None, - parameters=[ - InputComponentParameter( - id="ens_cost", - time_dependent=False, - scenario_dependent=False, - scenario_group=None, - value=0.5, - ), - InputComponentParameter( - id="spillage_cost", - time_dependent=False, - scenario_dependent=False, - scenario_group=None, - value=1.0, - ), - ], - ), - ], - components=[], - connections=[], - ) - - expected_validated_data.nodes.sort(key=lambda x: x.id) - validated_data.nodes.sort(key=lambda x: x.id) - assert validated_data == expected_validated_data - def test_convert_solar_to_component( self, local_study_w_areas: Study, fr_solar: None, lib_id: str ): - areas, converter = self._init_area_reading(local_study_w_areas) + converter = self._init_study_converter(local_study_w_areas) solar_components, solar_connection = converter._convert_solar_to_component_list( - areas, lib_id + lib_id ) - study_path = converter.study_path - solar_timeseries = str(study_path / "input" / "solar" / "series" / "solar_fr") + solar_timeseries = str( + converter.study_path / "input" / "solar" / "series" / "solar_fr" + ) expected_solar_connection = [ InputPortConnections( component1="solar", @@ -612,14 +603,15 @@ def test_convert_solar_to_component( def test_convert_load_to_component( self, local_study_w_areas: Study, fr_load: None, lib_id: str ): - areas, converter = self._init_area_reading(local_study_w_areas) + converter = self._init_study_converter(local_study_w_areas) load_components, load_connection = converter._convert_load_to_component_list( - 
areas, lib_id + lib_id ) - study_path = converter.study_path - load_timeseries = str(study_path / "input" / "load" / "series" / "load_fr") + load_timeseries = str( + converter.study_path / "input" / "load" / "series" / "load_fr" + ) expected_load_connection = [ InputPortConnections( component1="load", @@ -656,14 +648,15 @@ def test_convert_load_to_component( def test_convert_wind_to_component_not_empty_file( self, local_study_w_areas: Study, fr_wind: int, lib_id: str ): - areas, converter = self._init_area_reading(local_study_w_areas) + converter = self._init_study_converter(local_study_w_areas) wind_components, wind_connection = converter._convert_wind_to_component_list( - areas, lib_id + lib_id ) - study_path = converter.study_path - wind_timeseries = str(study_path / "input" / "wind" / "series" / "wind_fr") + wind_timeseries = str( + converter.study_path / "input" / "wind" / "series" / "wind_fr" + ) expected_wind_connection = [ InputPortConnections( component1="wind", @@ -700,9 +693,9 @@ def test_convert_wind_to_component_not_empty_file( def test_convert_wind_to_component_empty_file( self, local_study_w_areas: Study, fr_wind: object, lib_id: str ): - areas, converter = self._init_area_reading(local_study_w_areas) + converter = self._init_study_converter(local_study_w_areas) - wind_components, _ = converter._convert_wind_to_component_list(areas, lib_id) + wind_components, _ = converter._convert_wind_to_component_list(lib_id) assert wind_components == [] @@ -716,14 +709,14 @@ def test_convert_wind_to_component_empty_file( def test_convert_wind_to_component_zero_values( self, local_study_w_areas: Study, fr_wind: int, lib_id: str ): - areas, converter = self._init_area_reading(local_study_w_areas) + converter = self._init_study_converter(local_study_w_areas) - wind_components, _ = converter._convert_wind_to_component_list(areas, lib_id) + wind_components, _ = converter._convert_wind_to_component_list(lib_id) assert wind_components == [] def test_convert_links_to_component(self, local_study_w_links: Study, lib_id: str): - _, converter = self._init_area_reading(local_study_w_links) + converter = self._init_study_converter(local_study_w_links) study_path = converter.study_path ( links_components, @@ -847,173 +840,236 @@ def test_convert_links_to_component(self, local_study_w_links: Study, lib_id: st ) assert links_connections == expected_link_connections - def _generate_tdp_instance_parameter( - self, areas, study_path, create_dataframes: bool = True - ): - if create_dataframes: - modulation_timeseries = str( - study_path - / "input" - / "thermal" - / "prepro" - / "fr" - / "gaz" - / "modulation.txt" - ) - series_path = ( - study_path - / "input" - / "thermal" - / "series" - / "fr" - / "gaz" - / "series.txt" - ) - data_p_max = [ - [1, 1, 1, 2], - [2, 2, 2, 6], - [3, 3, 3, 1], - ] - data_series = [ - [8], - [10], - [2], - ] - df = pd.DataFrame(data_p_max) - df.to_csv(modulation_timeseries, sep="\t", index=False, header=False) - - df = pd.DataFrame(data_series) - df.to_csv(series_path, sep="\t", index=False, header=False) - - for area in areas: - thermals = area.get_thermals() - for thermal in thermals.values(): - if thermal.area_id == "fr": - tdp = ThermalDataPreprocessing(thermal, study_path) - return tdp - - def _setup_test(self, local_study_w_thermal, filename): + def _setup_preprocessing_thermal( + self, local_study_w_thermal: Study + ) -> ThermalDataPreprocessing: """ Initializes test parameters and returns the instance and expected file path. 
""" - areas, converter = self._init_area_reading(local_study_w_thermal) - study_path = converter.study_path - instance = self._generate_tdp_instance_parameter(areas, study_path) - expected_path = ( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / filename + + logger = Logger(__name__, local_study_w_thermal.service.config.study_path) + converter: AntaresStudyConverter = AntaresStudyConverter( + study_input=local_study_w_thermal, logger=logger ) - return instance, expected_path + + def _get_thermal_prepro_with_first_thermal( + converter: AntaresStudyConverter, area_id: str = "fr" + ) -> ThermalDataPreprocessing: + areas: dict[Area] = converter.study.get_areas().values() + + thermal: ThermalCluster = next( + ( + thermal + for area in areas + for thermal in area.get_thermals().values() + if thermal.area_id == area_id + ), + None, + ) + return ThermalDataPreprocessing(thermal, converter.study_path) + + return _get_thermal_prepro_with_first_thermal(converter) def _validate_component( - self, instance, process_method, expected_path, expected_values + self, + component: InputComponentParameter, + component_id, + expected_path, + expected_values, ): """ Executes the given processing method, validates the component, and compares the output dataframe. """ - component = getattr(instance, process_method)() + expected_component = InputComponentParameter( - id=process_method.split("process_")[1], + id=component_id, time_dependent=True, scenario_dependent=True, - value=str(expected_path), + value=str(expected_path).removesuffix(".txt"), ) - current_df = pd.read_csv(expected_path.with_suffix(".txt"), header=None) + current_df = pd.read_csv(expected_path, header=None) expected_df = pd.DataFrame(expected_values) assert current_df.equals(expected_df) assert component == expected_component - def _test_p_min_cluster(self, local_study_w_thermal): + @pytest.mark.parametrize( + "local_study_w_thermal", + [ + ( + pd.DataFrame( + [ + [1, 1, 1, 2], + [2, 2, 2, 6], + [3, 3, 3, 1], + ] + ), # modulation + pd.DataFrame( + [ + [8], + [10], + [2], + ] + ), # series + ), + ], + indirect=True, + ) + def test_p_min_cluster(self, local_study_w_thermal): """Tests the p_min_cluster parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "p_min_cluster.txt" + tdp = self._setup_preprocessing_thermal(local_study_w_thermal) + expected_path = ( + local_study_w_thermal.service.config.study_path + / "input" + / "thermal" + / "series" + / "fr" + / "gaz" + / "p_min_cluster.txt" ) expected_values = [ - [6.0], + [4.0], [10.0], [2.0], ] # min(min_gen_modulation * unit_count * nominal_capacity, p_max_cluster) + component = tdp.generate_component_parameter("p_min_cluster") self._validate_component( - instance, "process_p_min_cluster", expected_path, expected_values + component, "p_min_cluster", expected_path, expected_values ) + @pytest.mark.parametrize( + "local_study_w_thermal", + [ + ( + pd.DataFrame( + [ + [1, 1, 1, 2], + [2, 2, 2, 6], + [3, 3, 3, 1], + ] + ), # modulation + pd.DataFrame( + [ + [8], + [10], + [2], + ] + ), # series + ), + ], + indirect=True, + ) def test_nb_units_min(self, local_study_w_thermal: Study): """Tests the nb_units_min parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "nb_units_min" + tdp = self._setup_preprocessing_thermal(local_study_w_thermal) + expected_path = ( + local_study_w_thermal.service.config.study_path + / "input" + / "thermal" + / "series" + / "fr" + / "gaz" + / "nb_units_min.txt" ) - 
instance.process_p_min_cluster() expected_values = [[2.0], [5.0], [1.0]] # ceil(p_min_cluster / p_max_unit) + + tdp.generate_component_parameter("p_min_cluster") + component = tdp.generate_component_parameter("nb_units_min") + self._validate_component( - instance, "process_nb_units_min", expected_path, expected_values + component, "nb_units_min", expected_path, expected_values ) + @pytest.mark.parametrize( + "local_study_w_thermal", + [ + ( + pd.DataFrame( + [ + [1, 1, 1, 2], + [2, 2, 2, 6], + [3, 3, 3, 1], + ] + ), # modulation + pd.DataFrame( + [ + [8], + [10], + [2], + ] + ), # series + ), + ], + indirect=True, + ) def test_nb_units_max(self, local_study_w_thermal: Study): """Tests the nb_units_max parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "nb_units_max" + tdp = self._setup_preprocessing_thermal(local_study_w_thermal) + expected_path = ( + local_study_w_thermal.service.config.study_path + / "input" + / "thermal" + / "series" + / "fr" + / "gaz" + / "nb_units_max.txt" ) - instance.process_p_min_cluster() expected_values = [[4.0], [5.0], [1.0]] # ceil(p_max_cluster / p_max_unit) + + tdp.generate_component_parameter("p_min_cluster") + component = tdp.generate_component_parameter("nb_units_max") + self._validate_component( - instance, "process_nb_units_max", expected_path, expected_values + component, "nb_units_max", expected_path, expected_values ) - @pytest.mark.parametrize("direction", ["forward", "backward"]) - def test_nb_units_max_variation( + def nb_units_max_variation( self, local_study_w_thermal: Study, - create_csv_from_constant_value: Callable[..., None], direction: Literal["forward"] | Literal["backward"], ): """ Tests nb_units_max_variation_forward and nb_units_max_variation_backward processing. 
""" - instance, expected_path = self._setup_test( - local_study_w_thermal, f"nb_units_max_variation_{direction}" - ) - modulation_timeseries = ( - instance.study_path / "input" / "thermal" / "prepro" / "fr" / "gaz" - ) - series_path = ( - instance.study_path / "input" / "thermal" / "series" / "fr" / "gaz" + + tdp = self._setup_preprocessing_thermal(local_study_w_thermal) + expected_path = ( + local_study_w_thermal.service.config.study_path + / "input" + / "thermal" + / "series" + / "fr" + / "gaz" + / f"nb_units_max_variation_{direction}.txt" ) - create_csv_from_constant_value(modulation_timeseries, "modulation", 840, 4) - create_csv_from_constant_value(series_path, "series", 840) - instance.process_nb_units_max() - nb_units_max_output = pd.read_csv( - instance.series_path / "nb_units_max.txt", header=None + tdp.generate_component_parameter("nb_units_max") + + variation_component = tdp.generate_component_parameter( + f"nb_units_max_variation_{direction}" ) - variation_component = getattr( - instance, f"process_nb_units_max_variation_{direction}" - )() current_df = pd.read_csv(variation_component.value + ".txt", header=None) - assert current_df[0][0] == max( - 0, nb_units_max_output[0][167] - nb_units_max_output[0][0] - ) - assert current_df[0][3] == max( - 0, nb_units_max_output[0][2] - nb_units_max_output[0][3] - ) - assert current_df[0][168] == max( - 0, nb_units_max_output[0][335] - nb_units_max_output[0][168] + nb_units_max_output = pd.read_csv( + tdp.series_path / "nb_units_max.txt", header=None ) - assert variation_component.value == str(expected_path) - def test_nb_units_max_variation_forward( - self, - local_study_w_thermal: Study, - create_csv_from_constant_value: Callable[..., None], - ): - self.test_nb_units_max_variation( - local_study_w_thermal, create_csv_from_constant_value, direction="forward" - ) + diff = nb_units_max_output.shift(-1) - nb_units_max_output - def test_nb_units_max_variation_backward( - self, - local_study_w_thermal: Study, - create_csv_from_constant_value: Callable[..., None], + # La valeur ne peut pas etre au dessous de 0 et si il manque une ligne tout en bas, la rajoute + expected_df = diff.clip(lower=0).fillna(0) + + pd.testing.assert_frame_equal(current_df, expected_df, check_dtype=False) + assert variation_component.value == str(expected_path).removesuffix(".txt") + + @pytest.mark.parametrize( + "direction, local_study_w_thermal", + [ + ("forward", DATAFRAME_PREPRO_THERMAL_CONFIG), + ("backward", DATAFRAME_PREPRO_THERMAL_CONFIG), + ], + indirect=["local_study_w_thermal"], + ) + def test_nb_units_max_variation( + self, local_study_w_thermal: Study, direction: Literal["forward", "backward"] ): - self.test_nb_units_max_variation( - local_study_w_thermal, create_csv_from_constant_value, direction="backward" - ) + self.nb_units_max_variation(local_study_w_thermal, direction)