diff --git a/src/andromede/libs/standard.py b/src/andromede/libs/standard.py
index b33c1cfc..9a6df24a 100644
--- a/src/andromede/libs/standard.py
+++ b/src/andromede/libs/standard.py
@@ -63,32 +63,6 @@
     .sum()
     .expec(),
 )
-"""
-A standard model for a linear cost generation, limited by a maximum generation.
-"""
-GENERATOR_MODEL = model(
-    id="GEN",
-    parameters=[
-        float_parameter("p_max", CONSTANT),
-        float_parameter("cost", CONSTANT),
-    ],
-    variables=[float_variable("generation", lower_bound=literal(0))],
-    ports=[ModelPort(port_type=BALANCE_PORT_TYPE, port_name="balance_port")],
-    port_fields_definitions=[
-        PortFieldDefinition(
-            port_field=PortFieldId("balance_port", "flow"),
-            definition=var("generation"),
-        )
-    ],
-    constraints=[
-        Constraint(
-            name="Max generation", expression=var("generation") <= param("p_max")
-        ),
-    ],
-    objective_operational_contribution=(param("cost") * var("generation"))
-    .sum()
-    .expec(),
-)
 
 """
 Basic link model using ports
@@ -132,6 +106,33 @@
     ],
 )
 
+"""
+A standard model for a linear cost generation, limited by a maximum generation.
+"""
+GENERATOR_MODEL = model(
+    id="GEN",
+    parameters=[
+        float_parameter("p_max", CONSTANT),
+        float_parameter("cost", CONSTANT),
+    ],
+    variables=[float_variable("generation", lower_bound=literal(0))],
+    ports=[ModelPort(port_type=BALANCE_PORT_TYPE, port_name="balance_port")],
+    port_fields_definitions=[
+        PortFieldDefinition(
+            port_field=PortFieldId("balance_port", "flow"),
+            definition=var("generation"),
+        )
+    ],
+    constraints=[
+        Constraint(
+            name="Max generation", expression=var("generation") <= param("p_max")
+        ),
+    ],
+    objective_operational_contribution=(param("cost") * var("generation"))
+    .sum()
+    .expec(),
+)
+
 GENERATOR_MODEL_WITH_PMIN = model(
     id="GEN",
     parameters=[
@@ -162,6 +163,39 @@
     .expec(),
 )
 
+"""
+A model for a linear cost generation, limited by a maximum generation per time step
+and by the total generation over the whole period. It assumes an initially full storage with no replenishment.
+"""
+GENERATOR_MODEL_WITH_STORAGE = model(
+    id="GEN",
+    parameters=[
+        float_parameter("p_max", CONSTANT),
+        float_parameter("cost", CONSTANT),
+        float_parameter("full_storage", CONSTANT),
+    ],
+    variables=[float_variable("generation", lower_bound=literal(0))],
+    ports=[ModelPort(port_type=BALANCE_PORT_TYPE, port_name="balance_port")],
+    port_fields_definitions=[
+        PortFieldDefinition(
+            port_field=PortFieldId("balance_port", "flow"),
+            definition=var("generation"),
+        )
+    ],
+    constraints=[
+        Constraint(
+            name="Max generation", expression=var("generation") <= param("p_max")
+        ),
+        Constraint(
+            name="Total storage",
+            expression=var("generation").sum() <= param("full_storage"),
+        ),
+    ],
+    objective_operational_contribution=(param("cost") * var("generation"))
+    .sum()
+    .expec(),
+)
+
 # For now, no starting cost
 THERMAL_CLUSTER_MODEL_HD = model(
     id="GEN",
diff --git a/tests/functional/test_performance.py b/tests/functional/test_performance.py
index 28d5d88d..d8b2321c 100644
--- a/tests/functional/test_performance.py
+++ b/tests/functional/test_performance.py
@@ -10,15 +10,20 @@
 #
 # This file is part of the Antares project.
 
-from andromede.expression.expression import literal, param
+from typing import cast
+
+import pytest
+
+from andromede.expression.expression import ExpressionNode, literal, param, var
 from andromede.expression.indexing_structure import IndexingStructure
 from andromede.libs.standard import (
     BALANCE_PORT_TYPE,
     DEMAND_MODEL,
     GENERATOR_MODEL,
+    GENERATOR_MODEL_WITH_STORAGE,
     NODE_BALANCE_MODEL,
 )
-from andromede.model import ModelPort, float_parameter, model
+from andromede.model import float_parameter, float_variable, model
 from andromede.simulation import TimeBlock, build_problem
 from andromede.study import (
     ConstantData,
@@ -28,16 +33,18 @@
     PortRef,
     create_component,
 )
-from tests.unittests.test_utils import generate_data
+from tests.unittests.test_utils import generate_scalar_matrix_data
 
 
-def test_large_sum_with_loop() -> None:
-    """
-    Test performance when the problem involves an expression with a high number of terms. Here the objective function is the sum over nb_terms terms.
+def test_large_sum_inside_model_with_loop() -> None:
     """
+    Test performance when the problem involves an expression with a high number of terms.
+    Here the objective function is the sum of nb_terms terms, built with a for-loop inside the model.
 
-    # This test pass with 476 terms but fails with 477 locally due to recursion depth, and even less terms are possible with Jenkins...
-    nb_terms = 100
+    This test passes with 476 terms but fails with 477 locally due to recursion depth,
+    and even fewer terms are possible on Jenkins...
+    """
+    nb_terms = 500
 
     time_blocks = [TimeBlock(0, [0])]
     scenarios = 1
@@ -46,38 +53,38 @@ def test_large_sum_with_loop() -> None:
     for i in range(1, nb_terms):
         database.add_data("simple_cost", f"cost_{i}", ConstantData(1 / i))
 
-    SIMPLE_COST_MODEL = model(
-        id="SIMPLE_COST",
-        parameters=[
-            float_parameter(f"cost_{i}", IndexingStructure(False, False))
-            for i in range(1, nb_terms)
-        ],
-        objective_operational_contribution=sum(
-            [param(f"cost_{i}") for i in range(1, nb_terms)]
-        ),
-    )
-
-    network = Network("test")
+    with pytest.raises(RecursionError, match="maximum recursion depth exceeded"):
+        SIMPLE_COST_MODEL = model(
+            id="SIMPLE_COST",
+            parameters=[
+                float_parameter(f"cost_{i}", IndexingStructure(False, False))
+                for i in range(1, nb_terms)
+            ],
+            objective_operational_contribution=cast(
+                ExpressionNode, sum(param(f"cost_{i}") for i in range(1, nb_terms))
+            ),
+        )
 
-    # for i in range(1, nb_terms + 1):
-    cost_model = create_component(model=SIMPLE_COST_MODEL, id="simple_cost")
-    network.add_component(cost_model)
+        # Won't run because the statement above raises the error
+        network = Network("test")
+        cost_model = create_component(model=SIMPLE_COST_MODEL, id="simple_cost")
+        network.add_component(cost_model)
 
-    problem = build_problem(network, database, time_blocks[0], scenarios)
-    status = problem.solver.Solve()
+        problem = build_problem(network, database, time_blocks[0], scenarios)
+        status = problem.solver.Solve()
 
-    assert status == problem.solver.OPTIMAL
-    assert problem.solver.Objective().Value() == sum(
-        [1 / i for i in range(1, nb_terms)]
-    )
+        assert status == problem.solver.OPTIMAL
+        assert problem.solver.Objective().Value() == sum(
+            [1 / i for i in range(1, nb_terms)]
+        )
 
 
-def test_large_sum_outside_model() -> None:
+def test_large_sum_outside_model_with_loop() -> None:
     """
-    Test performance when the problem involves an expression with a high number of terms. Here the objective function is the sum over nb_terms terms.
+    Test performance when the problem involves an expression with a high number of terms.
+    Here the objective function is the sum of nb_terms terms, built with a for-loop outside the model.
     """
-
-    nb_terms = 10000
+    nb_terms = 10_000
 
     time_blocks = [TimeBlock(0, [0])]
     scenarios = 1
@@ -93,7 +100,6 @@ def test_large_sum_outside_model() -> None:
 
     network = Network("test")
 
-    # for i in range(1, nb_terms + 1):
     simple_model = create_component(
         model=SIMPLE_COST_MODEL,
         id="simple_cost",
@@ -104,10 +110,101 @@
     status = problem.solver.Solve()
 
     assert status == problem.solver.OPTIMAL
-    assert problem.solver.Objective().Value() == sum(
-        [1 / i for i in range(1, nb_terms)]
+    assert problem.solver.Objective().Value() == obj_coeff
+
+
+def test_large_sum_inside_model_with_sum_operator() -> None:
+    """
+    Test performance when the problem involves an expression with a high number of terms.
+    Here the objective function is the sum of nb_terms terms, built with the sum() operator inside the model.
+    """
+    nb_terms = 10_000
+
+    scenarios = 1
+    time_blocks = [TimeBlock(0, list(range(nb_terms)))]
+    database = DataBase()
+
+    # Weird values appear when the "cost" varies over time and we use the sum() operator:
+    # for testing purposes, we use a constant value, since the problem seems to arise when
+    # we try to linearize nb_terms variables with nb_terms distinct parameters.
+    # TODO: check the sum() operator for time-variable parameters
+    database.add_data("simple_cost", "cost", ConstantData(3))
+
+    SIMPLE_COST_MODEL = model(
+        id="SIMPLE_COST",
+        parameters=[
+            float_parameter("cost", IndexingStructure(False, False)),
+        ],
+        variables=[
+            float_variable(
+                "var",
+                lower_bound=literal(1),
+                upper_bound=literal(1),
+                structure=IndexingStructure(True, False),
+            ),
+        ],
+        objective_operational_contribution=(param("cost") * var("var")).sum(),
     )
 
+    network = Network("test")
+
+    cost_model = create_component(model=SIMPLE_COST_MODEL, id="simple_cost")
+    network.add_component(cost_model)
+
+    problem = build_problem(network, database, time_blocks[0], scenarios)
+    status = problem.solver.Solve()
+
+    assert status == problem.solver.OPTIMAL
+    assert problem.solver.Objective().Value() == 3 * nb_terms
+
+
+def test_large_sum_of_port_connections() -> None:
+    """
+    Test performance when the problem involves a model where several generators are connected to a node.
+
+    This test passes with 470 terms but fails with 471 locally due to recursion depth,
+    and possibly even fewer terms are possible on Jenkins...
+ """ + nb_generators = 500 + + time_block = TimeBlock(0, [0]) + scenarios = 1 + + database = DataBase() + database.add_data("D", "demand", ConstantData(nb_generators)) + + for gen_id in range(nb_generators): + database.add_data(f"G_{gen_id}", "p_max", ConstantData(1)) + database.add_data(f"G_{gen_id}", "cost", ConstantData(5)) + + node = Node(model=NODE_BALANCE_MODEL, id="N") + demand = create_component(model=DEMAND_MODEL, id="D") + generators = [ + create_component(model=GENERATOR_MODEL, id=f"G_{gen_id}") + for gen_id in range(nb_generators) + ] + + network = Network("test") + network.add_node(node) + + network.add_component(demand) + network.connect(PortRef(demand, "balance_port"), PortRef(node, "balance_port")) + + for gen_id in range(nb_generators): + network.add_component(generators[gen_id]) + network.connect( + PortRef(generators[gen_id], "balance_port"), PortRef(node, "balance_port") + ) + + with pytest.raises(RecursionError, match="maximum recursion depth exceeded"): + problem = build_problem(network, database, time_block, scenarios) + + # Won't run because last statement will raise the error + status = problem.solver.Solve() + + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 5 * nb_generators + def test_basic_balance_on_whole_year() -> None: """ @@ -119,7 +216,9 @@ def test_basic_balance_on_whole_year() -> None: time_block = TimeBlock(1, list(range(horizon))) database = DataBase() - database.add_data("D", "demand", generate_data(100, horizon, scenarios)) + database.add_data( + "D", "demand", generate_scalar_matrix_data(100, horizon, scenarios) + ) database.add_data("G", "p_max", ConstantData(100)) database.add_data("G", "cost", ConstantData(30)) @@ -140,4 +239,42 @@ def test_basic_balance_on_whole_year() -> None: status = problem.solver.Solve() assert status == problem.solver.OPTIMAL - assert problem.solver.Objective().Value() == 3000 * horizon + assert problem.solver.Objective().Value() == 30 * 100 * horizon + + +def test_basic_balance_on_whole_year_with_large_sum() -> None: + """ + Balance on one node with one fixed demand and one generation with storage, on 8760 timestep. 
+ """ + + scenarios = 1 + horizon = 8760 + time_block = TimeBlock(1, list(range(horizon))) + + database = DataBase() + database.add_data( + "D", "demand", generate_scalar_matrix_data(100, horizon, scenarios) + ) + + database.add_data("G", "p_max", ConstantData(100)) + database.add_data("G", "cost", ConstantData(30)) + database.add_data("G", "full_storage", ConstantData(100 * horizon)) + + node = Node(model=NODE_BALANCE_MODEL, id="N") + demand = create_component(model=DEMAND_MODEL, id="D") + gen = create_component( + model=GENERATOR_MODEL_WITH_STORAGE, id="G" + ) # Limits the total generation inside a TimeBlock + + network = Network("test") + network.add_node(node) + network.add_component(demand) + network.add_component(gen) + network.connect(PortRef(demand, "balance_port"), PortRef(node, "balance_port")) + network.connect(PortRef(gen, "balance_port"), PortRef(node, "balance_port")) + + problem = build_problem(network, database, time_block, scenarios) + status = problem.solver.Solve() + + assert status == problem.solver.OPTIMAL + assert problem.solver.Objective().Value() == 30 * 100 * horizon diff --git a/tests/functional/test_stochastic.py b/tests/functional/test_stochastic.py index d19914f1..97fc7971 100644 --- a/tests/functional/test_stochastic.py +++ b/tests/functional/test_stochastic.py @@ -29,7 +29,7 @@ PortRef, create_component, ) -from tests.unittests.test_utils import generate_data +from tests.unittests.test_utils import generate_scalar_matrix_data @pytest.fixture @@ -46,11 +46,19 @@ def scenarios() -> int: def database(horizon: int, scenarios: int) -> DataBase: database = DataBase() - database.add_data("D", "demand", generate_data(500, horizon, scenarios)) - - database.add_data("BASE", "nb_failures", generate_data(1, horizon, scenarios)) - database.add_data("SEMIBASE", "nb_failures", generate_data(1, horizon, scenarios)) - database.add_data("PEAK", "nb_failures", generate_data(1, horizon, scenarios)) + database.add_data( + "D", "demand", generate_scalar_matrix_data(500, horizon, scenarios) + ) + + database.add_data( + "BASE", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios) + ) + database.add_data( + "SEMIBASE", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios) + ) + database.add_data( + "PEAK", "nb_failures", generate_scalar_matrix_data(1, horizon, scenarios) + ) database.add_data("BASE", "p_max", ConstantData(250)) database.add_data("BASE", "p_min", ConstantData(100)) diff --git a/tests/unittests/test_utils.py b/tests/unittests/test_utils.py index 5d24a348..f88477eb 100644 --- a/tests/unittests/test_utils.py +++ b/tests/unittests/test_utils.py @@ -10,10 +10,17 @@ # # This file is part of the Antares project. +from typing import List + import pandas as pd import pytest -from andromede.study import TimeScenarioIndex, TimeScenarioSeriesData +from andromede.study import ( + TimeIndex, + TimeScenarioIndex, + TimeScenarioSeriesData, + TimeSeriesData, +) from andromede.utils import get_or_add @@ -33,7 +40,9 @@ def value_factory() -> str: assert get_or_add(d, "key2", value_factory) == "value2" -def generate_data(value: float, horizon: int, scenarios: int) -> TimeScenarioSeriesData: +def generate_scalar_matrix_data( + value: float, horizon: int, scenarios: int +) -> TimeScenarioSeriesData: data = pd.DataFrame(index=range(horizon), columns=range(scenarios)) data.fillna(value, inplace=True)