From 3733f0c88b9518f825a1bca27b3cb447bf7aa447 Mon Sep 17 00:00:00 2001 From: killian-scalian Date: Mon, 31 Mar 2025 17:50:46 +0200 Subject: [PATCH 1/3] short term storage handling --- requirements.in | 2 +- requirements.txt | 2 +- .../input_converter/src/converter.py | 112 +++++++- src/andromede/study/resolve_components.py | 1 + tests/antares_historic/conftest.py | 31 ++ .../antares_historic/test_antares_historic.py | 66 +++-- tests/input_converter/conftest.py | 24 +- tests/input_converter/test_converter.py | 265 ++++++++++-------- .../test_thermal_preprocessing.py | 185 ++++++++++++ 9 files changed, 542 insertions(+), 146 deletions(-) create mode 100644 tests/input_converter/test_thermal_preprocessing.py diff --git a/requirements.in b/requirements.in index fe32f895..a90d2410 100644 --- a/requirements.in +++ b/requirements.in @@ -4,4 +4,4 @@ scipy==1.10.1 antlr4-python3-runtime==4.13.1 PyYAML~=6.0.1 pydantic -antares_craft>=0.2.1 +antares_craft>=0.2.3 diff --git a/requirements.txt b/requirements.txt index 4732a1cb..73ba2a42 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,7 +8,7 @@ absl-py==2.1.0 # via ortools annotated-types==0.7.0 # via pydantic -antares-craft==0.2.1 +antares-craft==0.2.3 # via -r requirements.in antares-study-version==1.0.9 # via antares-craft diff --git a/src/andromede/input_converter/src/converter.py b/src/andromede/input_converter/src/converter.py index dada9723..8b4f309b 100644 --- a/src/andromede/input_converter/src/converter.py +++ b/src/andromede/input_converter/src/converter.py @@ -11,7 +11,7 @@ # This file is part of the Antares project. 
import logging from pathlib import Path -from typing import Optional, Union, Iterable +from typing import Iterable, Optional, Union from antares.craft.model.area import Area from antares.craft.model.study import Study, read_study_local @@ -258,6 +258,115 @@ def _convert_thermal_to_component_list( ) return components, connections + def _convert_st_storage_to_component_list( + self, areas: Iterable[Area] + ) -> tuple[list[InputComponent], list[InputPortConnections]]: + components = [] + connections = [] + self.logger.info("Converting short-term storages to component list...") + # Add short-term storage components for each area + for area in areas: + storages = area.get_st_storages() + for storage in storages.values(): + series_path = ( + self.study_path + / "input" + / "st-storage" + / "series" + / Path(storage.area_id) + / Path(storage.id) + ) + inflows_path = series_path / "inflows" + lower_rule_curve_path = series_path / "lower-rule-curve" + pmax_injection_path = series_path / "PMAX-injection" + pmax_withdrawal_path = series_path / "PMAX-withdrawal" + upper_rule_curve_path = series_path / "upper-rule-curve" + components.append( + InputComponent( + id=storage.id, + model="short-term-storage", + parameters=[ + InputComponentParameter( + id="efficiency_injection", + time_dependent=False, + scenario_dependent=False, + value=storage.properties.efficiency, + ), + # TODO wait for update of antares craft + # InputComponentParameter( + # id="efficiency_withdrawal", + # time_dependent=False, + # scenario_dependent=False, + # value=storage.properties.efficiencywithdrawal, + # ), + InputComponentParameter( + id="initial_level", + time_dependent=False, + scenario_dependent=False, + value=storage.properties.initial_level, + ), + InputComponentParameter( + id="reservoir_capacity", + time_dependent=False, + scenario_dependent=False, + value=storage.properties.reservoir_capacity, + ), + InputComponentParameter( + id="injection_nominal_capacity", + time_dependent=False, + 
scenario_dependent=False, + value=storage.properties.injection_nominal_capacity, + ), + InputComponentParameter( + id="withdrawal_nominal_capacity", + time_dependent=False, + scenario_dependent=False, + value=storage.properties.withdrawal_nominal_capacity, + ), + InputComponentParameter( + id="inflows", + time_dependent=True, + scenario_dependent=True, + value=str(inflows_path), + ), + InputComponentParameter( + id="lower_rule_curve", + time_dependent=True, + scenario_dependent=True, + value=str(lower_rule_curve_path), + ), + InputComponentParameter( + id="p_max_injection_modulation", + time_dependent=True, + scenario_dependent=True, + value=str(pmax_injection_path), + ), + InputComponentParameter( + id="p_max_withdrawal_modulation", + time_dependent=True, + scenario_dependent=True, + value=str(pmax_withdrawal_path), + ), + InputComponentParameter( + id="upper_rule_curve", + time_dependent=True, + scenario_dependent=True, + value=str(upper_rule_curve_path), + ), + ], + ) + ) + + connections.append( + InputPortConnections( + component1=storage.id, + port1="injection_port", + component2=area.id, + port2="balance_port", + ) + ) + return components, connections + def _convert_link_to_component_list( self, ) -> tuple[list[InputComponent], list[InputPortConnections]]: @@ -446,6 +555,7 @@ def convert_study_to_input_study(self) -> InputStudy: conversion_methods = [ self._convert_renewable_to_component_list, self._convert_thermal_to_component_list, + self._convert_st_storage_to_component_list, self._convert_load_to_component_list, self._convert_wind_to_component_list, self._convert_solar_to_component_list, diff --git a/src/andromede/study/resolve_components.py b/src/andromede/study/resolve_components.py index 1e1cc01b..75c02b90 100644 --- a/src/andromede/study/resolve_components.py +++ b/src/andromede/study/resolve_components.py @@ -167,6 +167,7 @@ def _build_data( ) -> AbstractDataStructure: if isinstance(param_value, str): # Should happen only if time-dependent or 
scenario-dependent + print("coucou", timeseries_dir, param_value) ts_data = load_ts_from_txt(param_value, timeseries_dir) if time_dependent and scenario_dependent: return TimeScenarioSeriesData(ts_data, scenarization) diff --git a/tests/antares_historic/conftest.py b/tests/antares_historic/conftest.py index 4ce883e7..b4859359 100644 --- a/tests/antares_historic/conftest.py +++ b/tests/antares_historic/conftest.py @@ -11,6 +11,7 @@ # This file is part of the Antares project. import pytest from antares.craft.model.area import AreaProperties +from antares.craft.model.st_storage import STStorageProperties from antares.craft.model.study import Study, create_study_local from antares.craft.model.thermal import ( LawOption, @@ -105,3 +106,33 @@ def local_study_end_to_end_w_thermal(local_study, default_thermal_cluster_proper thermal_name, properties=default_thermal_cluster_properties ) return local_study + + +@pytest.fixture +def default_st_storage_cluster_properties() -> STStorageProperties: + return STStorageProperties( + injection_nominal_capacity=10, + withdrawal_nominal_capacity=10, + reservoir_capacity=0, + efficiency=1, + initial_level=0.5, + initial_level_optim=False, + enabled=True, + ) + + +@pytest.fixture +def local_study_with_st_storage( + local_study_end_to_end_w_thermal, default_st_storage_cluster_properties +) -> Study: + """ + Create an empty study + Create an area with custom area properties + Create a thermal cluster with custom thermal properties + Create a short term storage + """ + storage_name = "battery" + local_study_end_to_end_w_thermal.get_areas()["fr"].create_st_storage( + storage_name, properties=default_st_storage_cluster_properties + ) + return local_study_end_to_end_w_thermal diff --git a/tests/antares_historic/test_antares_historic.py b/tests/antares_historic/test_antares_historic.py index adb234f7..c5d81d5a 100644 --- a/tests/antares_historic/test_antares_historic.py +++ b/tests/antares_historic/test_antares_historic.py @@ -1,3 +1,4 @@ +from 
dataclasses import dataclass from pathlib import Path import pandas as pd @@ -8,6 +9,7 @@ from andromede.model.parsing import InputLibrary, parse_yaml_library from andromede.model.resolve_library import resolve_library from andromede.simulation import TimeBlock, build_problem +from andromede.simulation.optimization import OptimizationProblem from andromede.study.data import load_ts_from_txt from andromede.study.parsing import InputStudy, parse_yaml_components from andromede.study.resolve_components import ( @@ -18,6 +20,12 @@ ) +@dataclass(frozen=True) +class ToolTestStudy: + study_component_data: InputStudy + study_path: Path + + def create_csv_from_constant_value( path, filename: str, lines: int, columns: int = 1, value: float = 1 ) -> None: @@ -124,7 +132,7 @@ def fill_timeseries(study_path) -> None: create_csv_from_constant_value(path=series_path, filename="nb_units_max", lines=3) -def _setup_study_component(study, period=None) -> tuple: +def _setup_study_component(study, period=None) -> ToolTestStudy: """ Helper function to reduce redundancy in study component setup. 
""" @@ -142,19 +150,24 @@ def _setup_study_component(study, period=None) -> tuple: compo_file = converter.output_path with compo_file.open() as c: - return parse_yaml_components(c), study_path + return ToolTestStudy(parse_yaml_components(c), study_path) @pytest.fixture -def study_component_basic(local_study_end_to_end_simple) -> tuple: +def study_component_basic(local_study_end_to_end_simple) -> ToolTestStudy: return _setup_study_component(local_study_end_to_end_simple) @pytest.fixture -def study_component_thermal(local_study_end_to_end_w_thermal) -> tuple: +def study_component_thermal(local_study_end_to_end_w_thermal) -> ToolTestStudy: return _setup_study_component(local_study_end_to_end_w_thermal, period=3) +@pytest.fixture +def study_component_st_storage(local_study_with_st_storage) -> ToolTestStudy: + return _setup_study_component(local_study_with_st_storage, period=3) + + @pytest.fixture def input_library( data_dir: Path, @@ -171,9 +184,9 @@ def input_library( return parse_yaml_library(lib) -def factory_balance_using_converter( - study_component: InputStudy, input_library: InputLibrary, expected_value: int -) -> None: +def problem_builder( + study_test_component: ToolTestStudy, input_library: InputLibrary +) -> OptimizationProblem: """ - Resolves the input library. - Constructs components and connections. @@ -181,8 +194,8 @@ def factory_balance_using_converter( - Builds the database and network. - Solves the optimization problem and verifies results. 
""" - study_path = study_component[1] - study_component_data = study_component[0] + study_path = study_test_component.study_path + study_component_data = study_test_component.study_component_data result_lib = resolve_library([input_library]) components_input = resolve_components_and_cnx(study_component_data, result_lib) @@ -192,10 +205,7 @@ def factory_balance_using_converter( network = build_network(components_input) scenarios = 1 - problem = build_problem(network, database, TimeBlock(1, [0, 1]), scenarios) - status = problem.solver.Solve() - assert status == problem.solver.OPTIMAL - assert problem.solver.Objective().Value() == expected_value + return build_problem(network, database, TimeBlock(1, [0, 1]), scenarios) def test_basic_balance_using_converter( @@ -204,9 +214,10 @@ def test_basic_balance_using_converter( """ Test basic study balance using the converter. """ - factory_balance_using_converter( - study_component_basic, input_library, expected_value=150 - ) + problem = problem_builder(study_component_basic, input_library) + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 150 def test_thermal_balance_using_converter( @@ -215,6 +226,23 @@ def test_thermal_balance_using_converter( """ Test thermal study balance using the converter. """ - factory_balance_using_converter( - study_component_thermal, input_library, expected_value=165 - ) + problem = problem_builder(study_component_thermal, input_library) + + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 165 + + +@pytest.mark.skip(reason="Test temporarily deactivated") +def test_storage_balance_using_converter( + study_component_st_storage: InputStudy, input_library: InputLibrary +) -> None: + """ + Test storage study balance using the converter. 
+ """ + # Wait for new version 0.92 of antares craft which include efficiencywithdrawalparameter + problem = problem_builder(study_component_st_storage, input_library) + + status = problem.solver.Solve() + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 165 diff --git a/tests/input_converter/conftest.py b/tests/input_converter/conftest.py index 528bbbd6..aba41c50 100644 --- a/tests/input_converter/conftest.py +++ b/tests/input_converter/conftest.py @@ -14,6 +14,7 @@ from antares.craft.model.area import Area, AreaProperties from antares.craft.model.hydro import HydroProperties from antares.craft.model.renewable import RenewableClusterProperties +from antares.craft.model.st_storage import STStorageProperties from antares.craft.model.study import Study, create_study_local from antares.craft.model.thermal import ThermalClusterProperties from antares.craft.tools.ini_tool import IniFile, InitializationFilesTypes @@ -134,7 +135,22 @@ def actual_renewable_list_ini(local_study_with_renewable) -> IniFile: @pytest.fixture -def local_study_with_st_storage(local_study_with_renewable) -> Study: +def default_thermal_cluster_properties() -> STStorageProperties: + return STStorageProperties( + injection_nominal_capacity=10, + withdrawal_nominal_capacity=10, + reservoir_capacity=0, + efficiency=1, + initial_level=0.5, + initial_level_optim=False, + enabled=True, + ) + + +@pytest.fixture +def local_study_with_st_storage( + local_study_with_renewable, default_thermal_cluster_properties +) -> Study: """ Create an empty study Create 2 areas with custom area properties @@ -143,8 +159,10 @@ def local_study_with_st_storage(local_study_with_renewable) -> Study: Create a renewable cluster Create a short term storage """ - storage_name = "short term storage" - local_study_with_renewable.get_areas()["fr"].create_st_storage(storage_name) + storage_name = "battery" + local_study_with_renewable.get_areas()["fr"].create_st_storage( + storage_name, 
properties=default_thermal_cluster_properties + ) return local_study_with_renewable diff --git a/tests/input_converter/test_converter.py b/tests/input_converter/test_converter.py index 6026ef64..05db4fe3 100644 --- a/tests/input_converter/test_converter.py +++ b/tests/input_converter/test_converter.py @@ -212,6 +212,150 @@ def test_convert_renewables_to_component(self, local_study_with_renewable): assert renewables_components == expected_renewable_component assert renewable_connections == expected_renewable_connections + def test_convert_st_storages_to_component(self, local_study_with_st_storage): + areas, converter = self._init_area_reading(local_study_with_st_storage) + study_path = converter.study_path + ( + storage_components, + storage_connections, + ) = converter._convert_st_storage_to_component_list(areas) + + inflows_path = ( + study_path + / "input" + / "st-storage" + / "series" + / "fr" + / "battery" + / "inflows" + ) + lower_rule_curve_path = ( + study_path + / "input" + / "st-storage" + / "series" + / "fr" + / "battery" + / "lower-rule-curve" + ) + pmax_injection_path = ( + study_path + / "input" + / "st-storage" + / "series" + / "fr" + / "battery" + / "PMAX-injection" + ) + pmax_withdrawal_path = ( + study_path + / "input" + / "st-storage" + / "series" + / "fr" + / "battery" + / "PMAX-withdrawal" + ) + upper_rule_curve_path = ( + study_path + / "input" + / "st-storage" + / "series" + / "fr" + / "battery" + / "upper-rule-curve" + ) + expected_storage_connections = [ + InputPortConnections( + component1="battery", + port1="injection_port", + component2="fr", + port2="balance_port", + ) + ] + expected_storage_component = [ + InputComponent( + id="battery", + model="short-term-storage", + scenario_group=None, + parameters=[ + InputComponentParameter( + id="efficiency_injection", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=1, + ), + InputComponentParameter( + id="initial_level", + time_dependent=False, + 
scenario_dependent=False, + scenario_group=None, + value=0.5, + ), + InputComponentParameter( + id="reservoir_capacity", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=0.0, + ), + InputComponentParameter( + id="injection_nominal_capacity", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=10.0, + ), + InputComponentParameter( + id="withdrawal_nominal_capacity", + time_dependent=False, + scenario_dependent=False, + scenario_group=None, + value=10.0, + ), + InputComponentParameter( + id="inflows", + time_dependent=True, + scenario_dependent=True, + scenario_group=None, + value=f"{inflows_path}", + ), + InputComponentParameter( + id="lower_rule_curve", + time_dependent=True, + scenario_dependent=True, + scenario_group=None, + value=f"{lower_rule_curve_path}", + ), + InputComponentParameter( + id="p_max_injection_modulation", + time_dependent=True, + scenario_dependent=True, + scenario_group=None, + value=f"{pmax_injection_path}", + ), + InputComponentParameter( + id="p_max_withdrawal_modulation", + time_dependent=True, + scenario_dependent=True, + scenario_group=None, + value=f"{pmax_withdrawal_path}", + ), + InputComponentParameter( + id="upper_rule_curve", + time_dependent=True, + scenario_dependent=True, + scenario_group=None, + value=f"{upper_rule_curve_path}", + ), + ], + ) + ] + + assert storage_components == expected_storage_component + assert storage_connections == expected_storage_connections + def test_convert_thermals_to_component( self, local_study_w_thermal, create_csv_from_constant_value ): @@ -387,8 +531,6 @@ def test_convert_thermals_to_component( ], ) ] - print("ACTUAL:", thermals_components) - print("EXPECTED:", expected_thermals_components) assert thermals_components == expected_thermals_components assert thermals_connections == expected_thermals_connections @@ -770,122 +912,3 @@ def _generate_tdp_instance_parameter( if thermal.area_id == "fr": tdp = 
ThermalDataPreprocessing(thermal, study_path) return tdp - - def _setup_test(self, local_study_w_thermal, filename): - """ - Initializes test parameters and returns the instance and expected file path. - """ - areas, converter = self._init_area_reading(local_study_w_thermal) - study_path = converter.study_path - instance = self._generate_tdp_instance_parameter(areas, study_path) - expected_path = ( - study_path / "input" / "thermal" / "series" / "fr" / "gaz" / filename - ) - return instance, expected_path - - def _validate_component( - self, instance, process_method, expected_path, expected_values - ): - """ - Executes the given processing method, validates the component, and compares the output dataframe. - """ - component = getattr(instance, process_method)() - expected_component = InputComponentParameter( - id=process_method.split("process_")[1], - time_dependent=True, - scenario_dependent=True, - value=str(expected_path), - ) - current_df = pd.read_csv(expected_path.with_suffix(".txt"), header=None) - expected_df = pd.DataFrame(expected_values) - assert current_df.equals(expected_df) - assert component == expected_component - - def _test_p_min_cluster(self, local_study_w_thermal): - """Tests the p_min_cluster parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "p_min_cluster.txt" - ) - expected_values = [ - [6.0], - [10.0], - [2.0], - ] # min(min_gen_modulation * unit_count * nominal_capacity, p_max_cluster) - self._validate_component( - instance, "process_p_min_cluster", expected_path, expected_values - ) - - def test_nb_units_min(self, local_study_w_thermal): - """Tests the nb_units_min parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "nb_units_min" - ) - instance.process_p_min_cluster() - expected_values = [[2.0], [5.0], [1.0]] # ceil(p_min_cluster / p_max_unit) - self._validate_component( - instance, "process_nb_units_min", expected_path, expected_values - ) - - def 
test_nb_units_max(self, local_study_w_thermal): - """Tests the nb_units_max parameter processing.""" - instance, expected_path = self._setup_test( - local_study_w_thermal, "nb_units_max" - ) - instance.process_p_min_cluster() - expected_values = [[4.0], [5.0], [1.0]] # ceil(p_max_cluster / p_max_unit) - self._validate_component( - instance, "process_nb_units_max", expected_path, expected_values - ) - - @pytest.mark.parametrize("direction", ["forward", "backward"]) - def test_nb_units_max_variation( - self, local_study_w_thermal, create_csv_from_constant_value, direction - ): - """ - Tests nb_units_max_variation_forward and nb_units_max_variation_backward processing. - """ - instance, expected_path = self._setup_test( - local_study_w_thermal, f"nb_units_max_variation_{direction}" - ) - modulation_timeseries = ( - instance.study_path / "input" / "thermal" / "prepro" / "fr" / "gaz" - ) - series_path = ( - instance.study_path / "input" / "thermal" / "series" / "fr" / "gaz" - ) - create_csv_from_constant_value(modulation_timeseries, "modulation", 840, 4) - create_csv_from_constant_value(series_path, "series", 840) - instance.process_nb_units_max() - nb_units_max_output = pd.read_csv( - instance.series_path / "nb_units_max.txt", header=None - ) - - variation_component = getattr( - instance, f"process_nb_units_max_variation_{direction}" - )() - current_df = pd.read_csv(variation_component.value + ".txt", header=None) - - assert current_df[0][0] == max( - 0, nb_units_max_output[0][167] - nb_units_max_output[0][0] - ) - assert current_df[0][3] == max( - 0, nb_units_max_output[0][2] - nb_units_max_output[0][3] - ) - assert current_df[0][168] == max( - 0, nb_units_max_output[0][335] - nb_units_max_output[0][168] - ) - assert variation_component.value == str(expected_path) - - def test_nb_units_max_variation_forward( - self, local_study_w_thermal, create_csv_from_constant_value - ): - self.test_nb_units_max_variation( - local_study_w_thermal, create_csv_from_constant_value, 
direction="forward" - ) - - def test_nb_units_max_variation_backward( - self, local_study_w_thermal, create_csv_from_constant_value - ): - self.test_nb_units_max_variation( - local_study_w_thermal, create_csv_from_constant_value, direction="backward" - ) diff --git a/tests/input_converter/test_thermal_preprocessing.py b/tests/input_converter/test_thermal_preprocessing.py new file mode 100644 index 00000000..59e5bd72 --- /dev/null +++ b/tests/input_converter/test_thermal_preprocessing.py @@ -0,0 +1,185 @@ +from dataclasses import replace + +import pandas as pd +import pytest + +from andromede.input_converter.src.converter import AntaresStudyConverter +from andromede.input_converter.src.data_preprocessing.thermal import ( + ThermalDataPreprocessing, +) +from andromede.input_converter.src.logger import Logger +from andromede.study.parsing import ( + InputComponentParameter, +) + + +class TestPreprocessingThermal: + def _init_area_reading(self, local_study): + logger = Logger(__name__, local_study.service.config.study_path) + converter = AntaresStudyConverter(study_input=local_study, logger=logger) + areas = converter.study.get_areas().values() + return areas, converter + + def _generate_tdp_instance_parameter( + self, areas, study_path, create_dataframes: bool = True + ): + if create_dataframes: + modulation_timeseries = str( + study_path + / "input" + / "thermal" + / "prepro" + / "fr" + / "gaz" + / "modulation.txt" + ) + series_path = ( + study_path + / "input" + / "thermal" + / "series" + / "fr" + / "gaz" + / "series.txt" + ) + data_p_max = [ + [1, 1, 1, 2], + [2, 2, 2, 6], + [3, 3, 3, 1], + ] + data_series = [ + [8], + [10], + [2], + ] + df = pd.DataFrame(data_p_max) + df.to_csv(modulation_timeseries, sep="\t", index=False, header=False) + + df = pd.DataFrame(data_series) + df.to_csv(series_path, sep="\t", index=False, header=False) + + for area in areas: + thermals = area.get_thermals() + for thermal in thermals.values(): + if thermal.area_id == "fr": + tdp = 
ThermalDataPreprocessing(thermal, study_path) + return tdp + + def _setup_test(self, local_study_w_thermal, filename): + """ + Initializes test parameters and returns the instance and expected file path. + """ + areas, converter = self._init_area_reading(local_study_w_thermal) + study_path = converter.study_path + instance = self._generate_tdp_instance_parameter(areas, study_path) + expected_path = ( + study_path / "input" / "thermal" / "series" / "fr" / "gaz" / filename + ) + return instance, expected_path + + def _validate_component( + self, instance, process_method, expected_path, expected_values + ): + """ + Executes the given processing method, validates the component, and compares the output dataframe. + """ + component = getattr(instance, process_method)() + expected_component = InputComponentParameter( + id=process_method.split("process_")[1], + time_dependent=True, + scenario_dependent=True, + value=str(expected_path), + ) + current_df = pd.read_csv(expected_path.with_suffix(".txt"), header=None) + expected_df = pd.DataFrame(expected_values) + assert current_df.equals(expected_df) + assert component == expected_component + + def _test_p_min_cluster(self, local_study_w_thermal): + """Tests the p_min_cluster parameter processing.""" + instance, expected_path = self._setup_test( + local_study_w_thermal, "p_min_cluster.txt" + ) + expected_values = [ + [6.0], + [10.0], + [2.0], + ] # min(min_gen_modulation * unit_count * nominal_capacity, p_max_cluster) + self._validate_component( + instance, "process_p_min_cluster", expected_path, expected_values + ) + + def test_nb_units_min(self, local_study_w_thermal): + """Tests the nb_units_min parameter processing.""" + instance, expected_path = self._setup_test( + local_study_w_thermal, "nb_units_min" + ) + instance.process_p_min_cluster() + expected_values = [[2.0], [5.0], [1.0]] # ceil(p_min_cluster / p_max_unit) + self._validate_component( + instance, "process_nb_units_min", expected_path, expected_values + ) + + def 
test_nb_units_max(self, local_study_w_thermal): + """Tests the nb_units_max parameter processing.""" + instance, expected_path = self._setup_test( + local_study_w_thermal, "nb_units_max" + ) + instance.process_p_min_cluster() + expected_values = [[4.0], [5.0], [1.0]] # ceil(p_max_cluster / p_max_unit) + self._validate_component( + instance, "process_nb_units_max", expected_path, expected_values + ) + + @pytest.mark.parametrize("direction", ["forward", "backward"]) + def test_nb_units_max_variation( + self, local_study_w_thermal, create_csv_from_constant_value, direction + ): + """ + Tests nb_units_max_variation_forward and nb_units_max_variation_backward processing. + """ + instance, expected_path = self._setup_test( + local_study_w_thermal, f"nb_units_max_variation_{direction}" + ) + modulation_timeseries = ( + instance.study_path / "input" / "thermal" / "prepro" / "fr" / "gaz" + ) + series_path = ( + instance.study_path / "input" / "thermal" / "series" / "fr" / "gaz" + ) + create_csv_from_constant_value(modulation_timeseries, "modulation", 840, 4) + create_csv_from_constant_value(series_path, "series", 840) + instance.process_nb_units_max() + nb_units_max_output = pd.read_csv( + instance.series_path / "nb_units_max.txt", header=None + ) + + variation_component = getattr( + instance, f"process_nb_units_max_variation_{direction}" + )() + current_df = pd.read_csv(variation_component.value + ".txt", header=None) + + assert current_df[0][0] == max( + 0, nb_units_max_output[0][167] - nb_units_max_output[0][0] + ) + assert current_df[0][3] == max( + 0, nb_units_max_output[0][2] - nb_units_max_output[0][3] + ) + assert current_df[0][168] == max( + 0, nb_units_max_output[0][335] - nb_units_max_output[0][168] + ) + assert variation_component.value == str(expected_path) + + def test_nb_units_max_variation_forward( + self, local_study_w_thermal, create_csv_from_constant_value + ): + self.test_nb_units_max_variation( + local_study_w_thermal, create_csv_from_constant_value, 
direction="forward" + ) + + def test_nb_units_max_variation_backward( + self, local_study_w_thermal, create_csv_from_constant_value + ): + self.test_nb_units_max_variation( + local_study_w_thermal, create_csv_from_constant_value, direction="backward" + ) From 43998302ffcc162faf6b15b5a15c5bf2bbb02534 Mon Sep 17 00:00:00 2001 From: killian-scalian Date: Mon, 31 Mar 2025 17:58:53 +0200 Subject: [PATCH 2/3] bump antares craft version --- requirements-dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 4dc6c919..23a723cd 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,7 +8,7 @@ absl-py==2.1.0 # via ortools annotated-types==0.7.0 # via pydantic -antares-craft==0.2.1 +antares-craft==0.2.3 # via -r requirements.in antares-study-version==1.0.9 # via antares-craft From 7c676ed28b36b7784495735cdea5a4431fab1136 Mon Sep 17 00:00:00 2001 From: killian-scalian Date: Tue, 1 Apr 2025 09:35:03 +0200 Subject: [PATCH 3/3] remove traces --- src/andromede/study/resolve_components.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/andromede/study/resolve_components.py b/src/andromede/study/resolve_components.py index 75c02b90..1e1cc01b 100644 --- a/src/andromede/study/resolve_components.py +++ b/src/andromede/study/resolve_components.py @@ -167,7 +167,6 @@ def _build_data( ) -> AbstractDataStructure: if isinstance(param_value, str): # Should happen only if time-dependent or scenario-dependent - print("coucou", timeseries_dir, param_value) ts_data = load_ts_from_txt(param_value, timeseries_dir) if time_dependent and scenario_dependent: return TimeScenarioSeriesData(ts_data, scenarization)