diff --git a/.github/workflows/codestyle.yml b/.github/workflows/codestyle.yml index 3d6b397a4..16061566c 100644 --- a/.github/workflows/codestyle.yml +++ b/.github/workflows/codestyle.yml @@ -1,11 +1,3 @@ -# This workflow will upload a Python Package using Twine when a release is created -# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries - -# This workflow uses actions that are not certified by GitHub. -# They are provided by a third-party and are governed by -# separate terms of service, privacy policy, and support -# documentation. - name: Codestyle checking on: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index fbf55accd..2dd3db0a6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,6 @@ on: branches: [ develop, release-candidate/* ] pull_request: types: [ opened, synchronize, reopened, ready_for_review ] - branches: [ develop, release-candidate/* ] workflow_call: jobs: diff --git a/flow360/__init__.py b/flow360/__init__.py index 3ade4723e..122b628ec 100644 --- a/flow360/__init__.py +++ b/flow360/__init__.py @@ -145,6 +145,9 @@ SI_unit_system, imperial_unit_system, ) +from flow360.component.simulation.user_code.core.types import UserVariable +from flow360.component.simulation.user_code.functions import math +from flow360.component.simulation.user_code.variables import control, solution from flow360.component.simulation.user_defined_dynamics.user_defined_dynamics import ( UserDefinedDynamic, ) @@ -274,5 +277,9 @@ "StreamlineOutput", "Transformation", "WallRotation", + "UserVariable", + "math", + "control", + "solution", "report", ] diff --git a/flow360/component/project_utils.py b/flow360/component/project_utils.py index fbd64021f..337749947 100644 --- a/flow360/component/project_utils.py +++ b/flow360/component/project_utils.py @@ -21,6 +21,7 @@ from flow360.component.simulation.primitives import Box, Cylinder, GhostSurface from flow360.component.simulation.simulation_params import SimulationParams from flow360.component.simulation.unit_system import LengthType +from flow360.component.simulation.user_code.core.types import save_user_variables from flow360.component.simulation.utils import model_attribute_unlock from flow360.component.simulation.web.asset_base import AssetBase from flow360.component.utils import parse_datetime @@ -281,6 +282,9 @@ def set_up_params_for_uploading( params = _set_up_default_reference_geometry(params, length_unit) + # Convert all reference of UserVariables to VariableToken + params = save_user_variables(params) + return params diff --git a/flow360/component/simulation/blueprint/__init__.py b/flow360/component/simulation/blueprint/__init__.py new file mode 100644 index 000000000..30b3f33c0 --- /dev/null +++ b/flow360/component/simulation/blueprint/__init__.py @@ -0,0 +1,18 @@ +"""Blueprint: Safe function serialization and visual programming integration.""" + +from flow360.component.simulation.blueprint.core.generator import model_to_function +from flow360.component.simulation.blueprint.core.parser import ( + expr_to_model, + function_to_model, +) + +from .core.function import FunctionNode +from .core.types import Evaluable + +__all__ = [ + "FunctionNode", + "Evaluable", + "function_to_model", + "model_to_function", + "expr_to_model", +] diff --git a/flow360/component/simulation/blueprint/core/__init__.py b/flow360/component/simulation/blueprint/core/__init__.py new file mode 100644 index 
000000000..4724c0f72 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/__init__.py @@ -0,0 +1,111 @@ +"""Core blueprint functionality.""" + +from .context import EvaluationContext, ReturnValue +from .expressions import ( + BinOpNode, + CallModelNode, + ConstantNode, + ExpressionNode, + ExpressionNodeType, + ListCompNode, + ListNode, + NameNode, + RangeCallNode, + SubscriptNode, + TupleNode, +) +from .function import FunctionNode +from .generator import expr_to_code, model_to_function, stmt_to_code +from .parser import function_to_model +from .statements import ( + AssignNode, + AugAssignNode, + ForLoopNode, + IfElseNode, + ReturnNode, + StatementNode, + StatementNodeType, + TupleUnpackNode, +) +from .types import Evaluable, TargetSyntax + + +def _model_rebuild() -> None: + """Update forward references in the correct order.""" + namespace = { + # Expression types + "NameNode": NameNode, + "ConstantNode": ConstantNode, + "BinOpNode": BinOpNode, + "RangeCallNode": RangeCallNode, + "CallModelNode": CallModelNode, + "TupleNode": TupleNode, + "ListNode": ListNode, + "ListCompNode": ListCompNode, + "SubscriptNode": SubscriptNode, + "ExpressionNodeType": ExpressionNodeType, + # Statement types + "AssignNode": AssignNode, + "AugAssignNode": AugAssignNode, + "IfElseNode": IfElseNode, + "ForLoopNode": ForLoopNode, + "ReturnNode": ReturnNode, + "TupleUnpackNode": TupleUnpackNode, + "StatementNodeType": StatementNodeType, + # Function type + "FunctionNode": FunctionNode, + } + + # First update expression classes that only depend on ExpressionType + BinOpNode.model_rebuild(_types_namespace=namespace) + RangeCallNode.model_rebuild(_types_namespace=namespace) + CallModelNode.model_rebuild(_types_namespace=namespace) + TupleNode.model_rebuild(_types_namespace=namespace) + ListNode.model_rebuild(_types_namespace=namespace) + ListCompNode.model_rebuild(_types_namespace=namespace) + SubscriptNode.model_rebuild(_types_namespace=namespace) + + # Then update statement classes that depend on both types + AssignNode.model_rebuild(_types_namespace=namespace) + AugAssignNode.model_rebuild(_types_namespace=namespace) + IfElseNode.model_rebuild(_types_namespace=namespace) + ForLoopNode.model_rebuild(_types_namespace=namespace) + ReturnNode.model_rebuild(_types_namespace=namespace) + TupleUnpackNode.model_rebuild(_types_namespace=namespace) + + # Finally update Function class + FunctionNode.model_rebuild(_types_namespace=namespace) + + +# Update forward references +_model_rebuild() + + +__all__ = [ + "ExpressionNode", + "NameNode", + "ConstantNode", + "BinOpNode", + "RangeCallNode", + "CallModelNode", + "TupleNode", + "ListNode", + "ListCompNode", + "ExpressionNodeType", + "StatementNode", + "AssignNode", + "AugAssignNode", + "IfElseNode", + "ForLoopNode", + "ReturnNode", + "TupleUnpackNode", + "StatementNodeType", + "FunctionNode", + "EvaluationContext", + "ReturnValue", + "Evaluable", + "expr_to_code", + "stmt_to_code", + "model_to_function", + "function_to_model", +] diff --git a/flow360/component/simulation/blueprint/core/context.py b/flow360/component/simulation/blueprint/core/context.py new file mode 100644 index 000000000..a3b150c39 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/context.py @@ -0,0 +1,154 @@ +"""Evaluation context that contains references to known symbols""" + +from typing import Any, Optional + +import pydantic as pd + +from flow360.component.simulation.blueprint.core.resolver import CallableResolver + + +class ReturnValue(Exception): + """ + Custom exception to 
signal a 'return' during the evaluation + of a function model. + """ + + def __init__(self, value: Any): + super().__init__("Function returned.") + self.value = value + + +class EvaluationContext: + """ + Manages variable scope and access during function evaluation. + + This class stores named values and optionally resolves names through a + `CallableResolver` when not already defined in the context. + """ + + def __init__( + self, resolver: CallableResolver, initial_values: Optional[dict[str, Any]] = None + ) -> None: + """ + Initialize the evaluation context. + + Args: + resolver (CallableResolver): A resolver used to look up callable names + and constants if not explicitly defined. + initial_values (Optional[dict[str, Any]]): Initial variable values to populate + the context with. + """ + self._values = initial_values or {} + self._data_models = {} + self._resolver = resolver + self._aliases: dict[str, str] = {} + + def get(self, name: str, resolve: bool = True) -> Any: + """ + Retrieve a value by name from the context. + + If the name is not explicitly defined and `resolve` is True, + attempt to resolve it using the resolver. + + Args: + name (str): The variable or callable name to retrieve. + resolve (bool): Whether to attempt to resolve the name if it's undefined. + + Returns: + Any: The corresponding value. + + Raises: + NameError: If the name is not found and cannot be resolved. + ValueError: If resolution is disabled and the name is undefined. + """ + if name not in self._values: + # Try loading from builtin callables/constants if possible + try: + if not resolve: + raise ValueError(f"{name} was not defined explicitly in the context") + val = self.resolve(name) + # If successful, store it so we don't need to import again + self._values[name] = val + except ValueError as err: + raise NameError(f"Name '{name}' is not defined") from err + return self._values[name] + + def get_data_model(self, name: str) -> Optional[pd.BaseModel]: + """Get the Validation model for the given name.""" + if name not in self._data_models: + return None + return self._data_models[name] + + def set_alias(self, name, alias) -> None: + """ + Set alias used for code generation. + This is meant for non-user variables. + """ + self._aliases[name] = alias + + def get_alias(self, name) -> Optional[str]: + """ + Get alias used for code generation. + This is meant for non-user variables. + """ + return self._aliases.get(name) + + def set(self, name: str, value: Any, data_model: pd.BaseModel = None) -> None: + """ + Assign a value to a name in the context. + + Args: + name (str): The variable name to set. + value (Any): The value to assign. + data_model (BaseModel, optional): The type of the associate with this entry (for non-user variables) + """ + self._values[name] = value + + if data_model: + self._data_models[name] = data_model + + def resolve(self, name): + """ + Resolve a name using the provided resolver. + + Args: + name (str): The name to resolve. + + Returns: + Any: The resolved callable or constant. + + Raises: + ValueError: If the name cannot be resolved by the resolver. + """ + return self._resolver.get_allowed_callable(name) + + def can_evaluate(self, name) -> bool: + """ + Check if the name can be evaluated via the resolver. + + Args: + name (str): The name to check. + + Returns: + bool: True if the name is allowed and resolvable, False otherwise. + """ + return self._resolver.can_evaluate(name) + + def copy(self) -> "EvaluationContext": + """ + Create a copy of the current context. 
+ + Returns: + EvaluationContext: A new context instance with the same resolver and a copy + of the current variable values. + """ + return EvaluationContext(self._resolver, dict(self._values)) + + @property + def user_variable_names(self): + """Get the set of user variables in the context.""" + return {name for name in self._values.keys() if "." not in name} + + def clear(self): + """Clear user variables from the context.""" + self._values = {name: value for name, value in self._values.items() if "." in name} diff --git a/flow360/component/simulation/blueprint/core/expressions.py b/flow360/component/simulation/blueprint/core/expressions.py new file mode 100644 index 000000000..453ba79ef --- /dev/null +++ b/flow360/component/simulation/blueprint/core/expressions.py @@ -0,0 +1,350 @@ +"""Data models and evaluator functions for rvalue expression elements""" + +import abc +from typing import Annotated, Any, Literal, Union + +import pydantic as pd + +from ..utils.operators import BINARY_OPERATORS, UNARY_OPERATORS +from .context import EvaluationContext +from .types import Evaluable + +ExpressionNodeType = Annotated[ + # pylint: disable=duplicate-code + Union[ + "NameNode", + "ConstantNode", + "BinOpNode", + "RangeCallNode", + "CallModelNode", + "TupleNode", + "ListNode", + "ListCompNode", + "SubscriptNode", + ], + pd.Field(discriminator="type"), +] + + +class ExpressionNode(pd.BaseModel, Evaluable, metaclass=abc.ABCMeta): + """ + Base class for expressions (like `x > 3`, `range(n)`, etc.). + + Subclasses must implement the `evaluate` and `used_names` methods + to support context-based evaluation and variable usage introspection. + """ + + def used_names(self) -> set[str]: + """ + Return a set of variable names used by the expression. + + Returns: + set[str]: A set of strings representing variable names used in the expression. 
+ """ + raise NotImplementedError + + +class NameNode(ExpressionNode): + """ + Expression representing a name qualifier + """ + + type: Literal["Name"] = "Name" + id: str + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + if raise_on_non_evaluable and not context.can_evaluate(self.id): + raise ValueError(f"Name '{self.id}' cannot be evaluated at client runtime") + if not force_evaluate and not context.can_evaluate(self.id): + data_model = context.get_data_model(self.id) + if data_model: + return data_model.model_validate({"name": self.id, "value": context.get(self.id)}) + raise ValueError("Partially evaluable symbols need to possess a type annotation.") + value = context.get(self.id) + # Recursively evaluate if the returned value is evaluable + if isinstance(value, Evaluable): + value = value.evaluate(context, raise_on_non_evaluable, force_evaluate) + return value + + def used_names(self) -> set[str]: + return {self.id} + + +class ConstantNode(ExpressionNode): + """ + Expression representing a constant numeric value + """ + + type: Literal["Constant"] = "Constant" + value: Any + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: # noqa: ARG002 + return self.value + + def used_names(self) -> set[str]: + return set() + + +class UnaryOpNode(ExpressionNode): + """ + Expression representing a unary operation + """ + + type: Literal["UnaryOp"] = "UnaryOp" + op: str + operand: "ExpressionNodeType" + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + operand_val = self.operand.evaluate(context, raise_on_non_evaluable, force_evaluate) + + if self.op not in UNARY_OPERATORS: + raise ValueError(f"Unsupported operator: {self.op}") + + return UNARY_OPERATORS[self.op](operand_val) + + def used_names(self) -> set[str]: + return self.operand.used_names() + + +class BinOpNode(ExpressionNode): + """ + Expression representing a binary operation + """ + + type: Literal["BinOp"] = "BinOp" + left: "ExpressionNodeType" + op: str + right: "ExpressionNodeType" + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + left_val = self.left.evaluate(context, raise_on_non_evaluable, force_evaluate) + right_val = self.right.evaluate(context, raise_on_non_evaluable, force_evaluate) + + if self.op not in BINARY_OPERATORS: + raise ValueError(f"Unsupported operator: {self.op}") + + return BINARY_OPERATORS[self.op](left_val, right_val) + + def used_names(self) -> set[str]: + left = self.left.used_names() + right = self.right.used_names() + return left.union(right) + + +class SubscriptNode(ExpressionNode): + """ + Expression representing an iterable object subscript + """ + + type: Literal["Subscript"] = "Subscript" + value: "ExpressionNodeType" + slice: "ExpressionNodeType" # No proper slicing for now, only constants.. 
+ ctx: str # Only load context + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + value = self.value.evaluate(context, raise_on_non_evaluable, force_evaluate) + item = self.slice.evaluate(context, raise_on_non_evaluable, force_evaluate) + if self.ctx == "Load": + if isinstance(item, float): + item = int(item) + return value[item] + if self.ctx == "Store": + raise NotImplementedError("Subscripted writes are not supported yet") + + raise ValueError(f"Invalid subscript context {self.ctx}") + + def used_names(self) -> set[str]: + value = self.value.used_names() + item = self.slice.used_names() + return value.union(item) + + +class RangeCallNode(ExpressionNode): + """ + Model for something like range(). + """ + + type: Literal["RangeCall"] = "RangeCall" + arg: "ExpressionNodeType" + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> range: + return range(self.arg.evaluate(context, raise_on_non_evaluable, force_evaluate)) + + def used_names(self) -> set[str]: + return self.arg.used_names() + + +class CallModelNode(ExpressionNode): + """Represents a function or method call expression. + + This class handles both direct function calls and method calls through a fully qualified name. + For example: + - Simple function: "sum" + - Method call: "np.array" + - Nested attribute: "td.GridSpec.auto" + """ + + type: Literal["CallModel"] = "CallModel" + func_qualname: str + args: list["ExpressionNodeType"] = [] + kwargs: dict[str, "ExpressionNodeType"] = {} + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + try: + # Split into parts for attribute traversal + parts = self.func_qualname.split(".") + + if len(parts) == 1: + # Direct function call + func = context.resolve(parts[0]) + else: + # Method or nested attribute call + base = context.resolve(parts[0]) + + # Traverse the attribute chain + for part in parts[1:-1]: + base = getattr(base, part) + + # Get the final callable + func = getattr(base, parts[-1]) + + # Evaluate arguments + args = [ + arg.evaluate(context, raise_on_non_evaluable, force_evaluate) for arg in self.args + ] + kwargs = { + k: v.evaluate(context, raise_on_non_evaluable, force_evaluate) + for k, v in self.kwargs.items() + } + + return func(*args, **kwargs) + + except AttributeError as e: + raise ValueError( + f"Invalid attribute in call chain '{self.func_qualname}': {str(e)}" + ) from e + except Exception as e: + raise ValueError(f"Error evaluating call to '{self.func_qualname}': {str(e)}") from e + + def used_names(self) -> set[str]: + names = set() + + for arg in self.args: + names = names.union(arg.used_names()) + + for _, arg in self.kwargs.items(): + names = names.union(arg.used_names()) + + return names + + +class TupleNode(ExpressionNode): + """Model for tuple expressions.""" + + type: Literal["Tuple"] = "Tuple" + elements: list["ExpressionNodeType"] + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> tuple: + return tuple( + elem.evaluate(context, raise_on_non_evaluable, force_evaluate) for elem in self.elements + ) + + def used_names(self) -> set[str]: + return self.arg.used_names() + + +class ListNode(ExpressionNode): + """Model for list expressions.""" + + type: Literal["List"] = "List" + elements: list["ExpressionNodeType"] + + def evaluate( + 
self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> list: + return [ + elem.evaluate(context, raise_on_non_evaluable, force_evaluate) for elem in self.elements + ] + + def used_names(self) -> set[str]: + names = set() + + for arg in self.elements: + names = names.union(arg.used_names()) + + return names + + +class ListCompNode(ExpressionNode): + """Model for list comprehension expressions.""" + + type: Literal["ListComp"] = "ListComp" + element: "ExpressionNodeType" # The expression to evaluate for each item + target: str # The loop variable name + iter: "ExpressionNodeType" # The iterable expression + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> list: + result = [] + iterable = self.iter.evaluate(context, raise_on_non_evaluable, force_evaluate) + for item in iterable: + # Create a new context for each iteration with the target variable + iter_context = context.copy() + iter_context.set(self.target, item) + result.append( + self.element.evaluate(iter_context, raise_on_non_evaluable, force_evaluate) + ) + return result + + def used_names(self) -> set[str]: + element = self.element.used_names() + iterable = self.iter.used_names() + + return element.union(iterable) diff --git a/flow360/component/simulation/blueprint/core/function.py b/flow360/component/simulation/blueprint/core/function.py new file mode 100644 index 000000000..560ba9873 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/function.py @@ -0,0 +1,38 @@ +"""Data models and evaluator functions for full Python function definitions""" + +from typing import Any + +import pydantic as pd + +from .context import EvaluationContext, ReturnValue +from .statements import StatementNodeType + + +class FunctionNode(pd.BaseModel): + """ + Represents an entire function: + def name(arg1, arg2, ...): + + """ + + name: str + args: list[str] + defaults: dict[str, Any] + body: list[StatementNodeType] + + def __call__(self, context: EvaluationContext, *call_args: Any) -> Any: + # Add default values + for arg_name, default_val in self.defaults.items(): + self.context.set(arg_name, default_val) + + # Add call arguments + for arg_name, arg_val in zip(self.args, call_args, strict=False): + self.context.set(arg_name, arg_val) + + try: + for stmt in self.body: + stmt.evaluate(self.context) + except ReturnValue as rv: + return rv.value + + return None diff --git a/flow360/component/simulation/blueprint/core/generator.py b/flow360/component/simulation/blueprint/core/generator.py new file mode 100644 index 000000000..f3731e5a4 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/generator.py @@ -0,0 +1,306 @@ +"""Code generator for the blueprint module, supports python and C++ syntax for now""" + +# pylint: disable=too-many-return-statements + +from typing import Any, Callable + +from flow360.component.simulation.blueprint.core.expressions import ( + BinOpNode, + CallModelNode, + ConstantNode, + ListCompNode, + ListNode, + NameNode, + RangeCallNode, + SubscriptNode, + TupleNode, + UnaryOpNode, +) +from flow360.component.simulation.blueprint.core.function import FunctionNode +from flow360.component.simulation.blueprint.core.statements import ( + AssignNode, + AugAssignNode, + ForLoopNode, + IfElseNode, + ReturnNode, + TupleUnpackNode, +) +from flow360.component.simulation.blueprint.core.types import TargetSyntax +from flow360.component.simulation.blueprint.utils.operators import ( + 
BINARY_OPERATORS, + UNARY_OPERATORS, +) + + +def _indent(code: str, level: int = 1) -> str: + """Add indentation to each line of code.""" + spaces = " " * level + return "\n".join(spaces + line if line else line for line in code.split("\n")) + + +def _empty(syntax): + if syntax == TargetSyntax.PYTHON: + return "None" + if syntax == TargetSyntax.CPP: + return "nullptr" + + raise ValueError( + f"Unsupported syntax type, available {[syntax.name for syntax in TargetSyntax]}" + ) + + +def _name(expr, name_translator): + if name_translator: + return name_translator(expr.id) + return expr.id + + +def _constant(expr): + if isinstance(expr.value, str): + return f"'{expr.value}'" + return str(expr.value) + + +def _unary_op(expr, syntax, name_translator): + op_info = UNARY_OPERATORS[expr.op] + + arg = expr_to_code(expr.operand, syntax, name_translator) + + return f"{op_info.symbol}{arg}" + + +def _binary_op(expr, syntax, name_translator): + left = expr_to_code(expr.left, syntax, name_translator) + right = expr_to_code(expr.right, syntax, name_translator) + + if syntax == TargetSyntax.CPP: + # Special case handling for operators not directly supported in CPP syntax, requires #include + if expr.op == "FloorDiv": + return f"floor({left} / {right})" + if expr.op == "Pow": + return f"pow({left}, {right})" + if expr.op == "Is": + return f"&{left} == &{right}" + + op_info = BINARY_OPERATORS[expr.op] + return f"({left} {op_info.symbol} {right})" + + +def _range_call(expr, syntax, name_translator): + if syntax == TargetSyntax.PYTHON: + arg = expr_to_code(expr.arg, syntax, name_translator) + return f"range({arg})" + + raise ValueError("Range calls are only supported for Python target syntax") + + +def _call_model(expr, syntax, name_translator): + if syntax == TargetSyntax.PYTHON: + args = [] + for arg in expr.args: + val_str = expr_to_code(arg, syntax, name_translator) + args.append(val_str) + args_str = ", ".join(args) + kwargs_parts = [] + for k, v in expr.kwargs.items(): + if v is None: + continue + val_str = expr_to_code(v, syntax, name_translator) + if not val_str or val_str.isspace(): + continue + kwargs_parts.append(f"{k}={val_str}") + + kwargs_str = ", ".join(kwargs_parts) + all_args = ", ".join(x for x in [args_str, kwargs_str] if x) + return f"{expr.func_qualname}({all_args})" + if syntax == TargetSyntax.CPP: + args = [] + for arg in expr.args: + val_str = expr_to_code(arg, syntax, name_translator) + args.append(val_str) + args_str = ", ".join(args) + if expr.kwargs: + raise ValueError("Named arguments are not supported in C++ syntax") + return f"{expr.func_qualname}({args_str})" + + raise ValueError( + f"Unsupported syntax type, available {[syntax.name for syntax in TargetSyntax]}" + ) + + +def _tuple(expr, syntax, name_translator): + elements = [expr_to_code(e, syntax, name_translator) for e in expr.elements] + + if syntax == TargetSyntax.PYTHON: + if len(expr.elements) == 0: + return "()" + if len(expr.elements) == 1: + return f"({elements[0]},)" + return f"({', '.join(elements)})" + if syntax == TargetSyntax.CPP: + if len(expr.elements) == 0: + raise TypeError("Zero-length tuple is found in expression.") + return f"std::vector({{{', '.join(elements)}}})" + + raise ValueError( + f"Unsupported syntax type, available {[syntax.name for syntax in TargetSyntax]}" + ) + + +def _list(expr, syntax, name_translator): + elements = [expr_to_code(e, syntax, name_translator) for e in expr.elements] + + if syntax == TargetSyntax.PYTHON: + if len(expr.elements) == 0: + return "[]" + elements_str = ", 
".join(elements) + return f"[{elements_str}]" + if syntax == TargetSyntax.CPP: + if len(expr.elements) == 0: + raise TypeError("Zero-length list is found in expression.") + + return f"std::vector({{{', '.join(elements)}}})" + + raise ValueError( + f"Unsupported syntax type, available {[syntax.name for syntax in TargetSyntax]}" + ) + + +def _list_comp(expr, syntax, name_translator): + if syntax == TargetSyntax.PYTHON: + element = expr_to_code(expr.element, syntax, name_translator) + target = expr_to_code(expr.target, syntax, name_translator) + iterator = expr_to_code(expr.iter, syntax, name_translator) + + return f"[{element} for {target} in {iterator}]" + + raise ValueError("List comprehensions are only supported for Python target syntax") + + +def _subscript(expr, syntax, name_translator): # pylint:disable=unused-argument + return f"{name_translator(expr.value.id)}[{expr.slice.value}]" + + +def expr_to_code( + expr: Any, + syntax: TargetSyntax = TargetSyntax.PYTHON, + name_translator: Callable[[str], str] = None, +) -> str: + """Convert an expression model back to source code.""" + if expr is None: + return _empty(syntax) + + # Names and constants are language-agnostic (apart from symbol remaps) + if isinstance(expr, NameNode): + return _name(expr, name_translator) + + if isinstance(expr, ConstantNode): + return _constant(expr) + + if isinstance(expr, UnaryOpNode): + return _unary_op(expr, syntax, name_translator) + + if isinstance(expr, BinOpNode): + return _binary_op(expr, syntax, name_translator) + + if isinstance(expr, RangeCallNode): + return _range_call(expr, syntax, name_translator) + + if isinstance(expr, CallModelNode): + return _call_model(expr, syntax, name_translator) + + if isinstance(expr, TupleNode): + return _tuple(expr, syntax, name_translator) + + if isinstance(expr, ListNode): + return _list(expr, syntax, name_translator) + + if isinstance(expr, ListCompNode): + return _list_comp(expr, syntax, name_translator) + + if isinstance(expr, SubscriptNode): + return _subscript(expr, syntax, name_translator) + + raise ValueError(f"Unsupported expression type: {type(expr)}") + + +def stmt_to_code( + stmt: Any, syntax: TargetSyntax = TargetSyntax.PYTHON, remap: dict[str, str] = None +) -> str: + """Convert a statement model back to source code.""" + if syntax == TargetSyntax.PYTHON: + if isinstance(stmt, AssignNode): + if stmt.target == "_": # Expression statement + return expr_to_code(stmt.value) + return f"{stmt.target} = {expr_to_code(stmt.value, syntax, remap)}" + + if isinstance(stmt, AugAssignNode): + op_map = { + "Add": "+=", + "Sub": "-=", + "Mult": "*=", + "Div": "/=", + } + op_str = op_map.get(stmt.op, f"{stmt.op}=") + return f"{stmt.target} {op_str} {expr_to_code(stmt.value, syntax, remap)}" + + if isinstance(stmt, IfElseNode): + code = [f"if {expr_to_code(stmt.condition)}:"] + code.append(_indent("\n".join(stmt_to_code(s, syntax, remap) for s in stmt.body))) + if stmt.orelse: + code.append("else:") + code.append(_indent("\n".join(stmt_to_code(s, syntax, remap) for s in stmt.orelse))) + return "\n".join(code) + + if isinstance(stmt, ForLoopNode): + code = [f"for {stmt.target} in {expr_to_code(stmt.iter)}:"] + code.append(_indent("\n".join(stmt_to_code(s, syntax, remap) for s in stmt.body))) + return "\n".join(code) + + if isinstance(stmt, ReturnNode): + return f"return {expr_to_code(stmt.value, syntax, remap)}" + + if isinstance(stmt, TupleUnpackNode): + targets = ", ".join(stmt.targets) + if len(stmt.values) == 1: + # Single expression that evaluates to a tuple + 
return f"{targets} = {expr_to_code(stmt.values[0], syntax, remap)}" + # Multiple expressions + values = ", ".join(expr_to_code(v, syntax, remap) for v in stmt.values) + return f"{targets} = {values}" + + raise ValueError(f"Unsupported statement type: {type(stmt)}") + + raise NotImplementedError("Statement translation is not available for other syntax types yet") + + +def model_to_function( + func: FunctionNode, + syntax: TargetSyntax = TargetSyntax.PYTHON, + remap: dict[str, str] = None, +) -> str: + """Convert a Function model back to source code.""" + if syntax == TargetSyntax.PYTHON: + args_with_defaults = [] + for arg in func.args: + if arg in func.defaults: + default_val = func.defaults[arg] + if isinstance(default_val, (int, float, str, bool)): + args_with_defaults.append(f"{arg}={default_val}") + else: + args_with_defaults.append(f"{arg}={expr_to_code(default_val, syntax, remap)}") + else: + args_with_defaults.append(arg) + + signature = f"def {func.name}({', '.join(args_with_defaults)}):" + + # Convert the function body + body_lines = [] + for stmt in func.body: + line = stmt_to_code(stmt) + body_lines.append(line) + + body = "\n".join(body_lines) if body_lines else "pass" + return f"{signature}\n{_indent(body)}" + + raise NotImplementedError("Function translation is not available for other syntax types yet") diff --git a/flow360/component/simulation/blueprint/core/parser.py b/flow360/component/simulation/blueprint/core/parser.py new file mode 100644 index 000000000..61bb99fc3 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/parser.py @@ -0,0 +1,272 @@ +"""Python code parser using the AST module""" + +# pylint: disable=too-many-return-statements, too-many-branches + +import ast +import inspect +from collections.abc import Callable +from typing import Any, Union + +from flow360.component.simulation.blueprint.core.context import EvaluationContext +from flow360.component.simulation.blueprint.core.expressions import ( + BinOpNode, + CallModelNode, + ConstantNode, + ExpressionNode, + ListCompNode, +) +from flow360.component.simulation.blueprint.core.expressions import ListNode as ListExpr +from flow360.component.simulation.blueprint.core.expressions import ( + NameNode, + RangeCallNode, + SubscriptNode, + TupleNode, + UnaryOpNode, +) +from flow360.component.simulation.blueprint.core.function import FunctionNode +from flow360.component.simulation.blueprint.core.statements import ( + AssignNode, + AugAssignNode, + ForLoopNode, + IfElseNode, + ReturnNode, + TupleUnpackNode, +) + + +def parse_expr(node: ast.AST, ctx: EvaluationContext) -> Any: + """Parse a Python AST expression into our intermediate representation.""" + if isinstance(node, ast.Name): + return NameNode(id=node.id) + + if isinstance(node, ast.Constant): + if hasattr(node, "value"): + return ConstantNode(value=node.value) + return ConstantNode(value=node.s) + + if isinstance(node, ast.Attribute): + # Handle attribute access (e.g., td.inf) + parts = [] + current = node + while isinstance(current, ast.Attribute): + parts.append(current.attr) + current = current.value + if isinstance(current, ast.Name): + parts.append(current.id) + # Create a Name node with the full qualified name + return NameNode(id=".".join(reversed(parts))) + raise ValueError(f"Unsupported attribute access: {ast.dump(node)}") + + if isinstance(node, ast.UnaryOp): + return UnaryOpNode(op=type(node.op).__name__, operand=parse_expr(node.operand, ctx)) + + if isinstance(node, ast.BinOp): + return BinOpNode( + op=type(node.op).__name__, + 
left=parse_expr(node.left, ctx), + right=parse_expr(node.right, ctx), + ) + + if isinstance(node, ast.Compare): + if len(node.ops) > 1 or len(node.comparators) > 1: + raise ValueError("Only single comparisons are supported") + return BinOpNode( + op=type(node.ops[0]).__name__, + left=parse_expr(node.left, ctx), + right=parse_expr(node.comparators[0], ctx), + ) + + if isinstance(node, ast.Subscript): + return SubscriptNode( + value=parse_expr(node.value, ctx), + slice=parse_expr(node.slice, ctx), + ctx=type(node.ctx).__name__, + ) + + if isinstance(node, ast.Call): + if isinstance(node.func, ast.Name) and node.func.id == "range" and len(node.args) == 1: + return RangeCallNode(arg=parse_expr(node.args[0], ctx)) + + # Build the full qualified name for the function + if isinstance(node.func, ast.Name): + func_name = node.func.id + elif isinstance(node.func, ast.Attribute): + # Handle nested attributes (e.g., td.GridSpec.auto) + parts = [] + current = node.func + while isinstance(current, ast.Attribute): + parts.append(current.attr) + current = current.value + if isinstance(current, ast.Name): + parts.append(current.id) + func_name = ".".join(reversed(parts)) + else: + raise ValueError(f"Unsupported function call: {ast.dump(node)}") + else: + raise ValueError(f"Unsupported function call: {ast.dump(node)}") + + # Parse arguments + args = [parse_expr(arg, ctx) for arg in node.args] + kwargs = { + kw.arg: parse_expr(kw.value, ctx) + for kw in node.keywords + if kw.arg is not None and kw.value is not None # Ensure value is not None + } + + return CallModelNode( + func_qualname=func_name, + args=args, + kwargs=kwargs, + ) + + if isinstance(node, ast.Tuple): + return TupleNode(elements=[parse_expr(elt, ctx) for elt in node.elts]) + + if isinstance(node, ast.List): + return ListExpr(elements=[parse_expr(elt, ctx) for elt in node.elts]) + + if isinstance(node, ast.ListComp): + if len(node.generators) != 1: + raise ValueError("Only single-generator list comprehensions are supported") + gen = node.generators[0] + if not isinstance(gen.target, ast.Name): + raise ValueError("Only simple targets in list comprehensions are supported") + if gen.ifs: + raise ValueError("If conditions in list comprehensions are not supported") + return ListCompNode( + element=parse_expr(node.elt, ctx), + target=gen.target.id, + iter=parse_expr(gen.iter, ctx), + ) + + raise ValueError(f"Unsupported expression type: {type(node)}") + + +def parse_stmt(node: ast.AST, ctx: EvaluationContext) -> Any: + """Parse a Python AST statement into our intermediate representation.""" + if isinstance(node, ast.Assign): + if len(node.targets) > 1: + raise ValueError("Multiple assignment targets not supported") + target = node.targets[0] + + if isinstance(target, ast.Name): + return AssignNode(target=target.id, value=parse_expr(node.value, ctx)) + if isinstance(target, ast.Tuple): + if not all(isinstance(elt, ast.Name) for elt in target.elts): + raise ValueError("Only simple names supported in tuple unpacking") + targets = [elt.id for elt in target.elts] + if isinstance(node.value, ast.Tuple): + values = [parse_expr(val, ctx) for val in node.value.elts] + return TupleUnpackNode(targets=targets, values=values) + return TupleUnpackNode(targets=targets, values=[parse_expr(node.value, ctx)]) + + raise ValueError(f"Unsupported assignment target: {type(target)}") + + if isinstance(node, ast.AugAssign): + if not isinstance(node.target, ast.Name): + raise ValueError("Only simple names supported in augmented assignment") + return AugAssignNode( + 
target=node.target.id, + op=type(node.op).__name__, + value=parse_expr(node.value, ctx), + ) + + if isinstance(node, ast.Expr): + # For expression statements, we use "_" as a dummy target + return AssignNode(target="_", value=parse_expr(node.value, ctx)) + + if isinstance(node, ast.If): + return IfElseNode( + condition=parse_expr(node.test, ctx), + body=[parse_stmt(stmt, ctx) for stmt in node.body], + orelse=[parse_stmt(stmt, ctx) for stmt in node.orelse] if node.orelse else [], + ) + + if isinstance(node, ast.For): + if not isinstance(node.target, ast.Name): + raise ValueError("Only simple names supported as loop targets") + return ForLoopNode( + target=node.target.id, + iter=parse_expr(node.iter, ctx), + body=[parse_stmt(stmt, ctx) for stmt in node.body], + ) + + if isinstance(node, ast.Return): + if node.value is None: + raise ValueError("Return statements must have a value") + return ReturnNode(value=parse_expr(node.value, ctx)) + + raise ValueError(f"Unsupported statement type: {type(node)}") + + +def function_to_model( + source: Union[str, Callable[..., Any]], + ctx: EvaluationContext, +) -> FunctionNode: + """Parse a Python function definition into our intermediate representation. + + Args: + source: Either a function object or a string containing the function definition + ctx: Evaluation context + """ + + # Convert function object to source string if needed + if callable(source) and not isinstance(source, str): + source = inspect.getsource(source) + + # Parse the source code into an AST + tree = ast.parse(source) + + # We expect a single function definition + if ( + not isinstance(tree, ast.Module) + or len(tree.body) != 1 + or not isinstance(tree.body[0], ast.FunctionDef) + ): + raise ValueError("Expected a single function definition") + + func_def = tree.body[0] + + # Extract function name and arguments + name = func_def.name + args = [arg.arg for arg in func_def.args.args] + defaults: dict[str, Any] = {} + + # Handle default values for arguments + default_offset = len(func_def.args.args) - len(func_def.args.defaults) + for i, default in enumerate(func_def.args.defaults): + arg_name = func_def.args.args[i + default_offset].arg + if isinstance(default, ast.Constant): + defaults[arg_name] = default.value + else: + defaults[arg_name] = parse_expr(default, ctx) + + # Parse the function body + body = [parse_stmt(stmt, ctx) for stmt in func_def.body] + + return FunctionNode(name=name, args=args, body=body, defaults=defaults) + + +def expr_to_model( + source: str, + ctx: EvaluationContext, +) -> ExpressionNode: + """Parse a Python rvalue expression + + Args: + source: a string containing the source + ctx: Optional evaluation context + """ + + # Parse the source code into an AST + tree = ast.parse(source) + + body = tree.body[0] + + # We expect a single line expression + if not isinstance(tree, ast.Module) or len(tree.body) != 1 or not isinstance(body, ast.Expr): + raise ValueError("Expected a single-line rvalue expression") + + expression = parse_expr(body.value, ctx) + + return expression diff --git a/flow360/component/simulation/blueprint/core/resolver.py b/flow360/component/simulation/blueprint/core/resolver.py new file mode 100644 index 000000000..8f2139fa3 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/resolver.py @@ -0,0 +1,98 @@ +"""Whitelisted functions and classes that can be called from blueprint functions.""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import Any + + +class CallableResolver: + """Manages 
resolution and validation of callable objects. + + Provides a unified interface for resolving function names, methods, and + attributes while enforcing whitelisting rules. + """ + + def __init__(self, callables, modules, imports, blacklist) -> None: + self._import_builtins = imports + self._callable_builtins = callables + self._module_builtins = modules + self._evaluation_blacklist = blacklist + + self._allowed_callables: dict[str, Callable[..., Any]] = {} + self._allowed_modules: dict[str, Any] = {} + + def register_callable(self, name: str, func: Callable[..., Any]) -> None: + """Register a callable for direct use.""" + self._allowed_callables[name] = func + + def register_module(self, name: str, module: Any) -> None: + """Register a module for attribute access.""" + self._allowed_modules[name] = module + + def can_evaluate(self, qualname: str) -> bool: + """Check if the name is not blacklisted for evaluation by the resolver""" + return qualname not in self._evaluation_blacklist + + def get_callable(self, qualname: str) -> Callable[..., Any]: + """Resolve a callable by its qualified name. + + Args: + qualname: Fully qualified name (e.g., "np.array" or "len") + context: Optional evaluation context for local lookups + + Returns: + The resolved callable object + + Raises: + ValueError: If the callable is not allowed or cannot be found + """ + # Check direct allowed callables + if qualname in self._allowed_callables: + return self._allowed_callables[qualname] + + # Handle module attributes + if "." in qualname: + module_name, *attr_parts = qualname.split(".") + if module_name in self._allowed_modules: + obj = self._allowed_modules[module_name] + for part in attr_parts: + obj = getattr(obj, part) + if qualname in self._callable_builtins: + return obj + # Try importing if it's a whitelisted callable + if qualname in self._callable_builtins: + for names, import_func in self._import_builtins.items(): + if module_name in names: + module = import_func(module_name) + self.register_module(module_name, module) + obj = module + for part in attr_parts: + obj = getattr(obj, part) + return obj + + raise ValueError(f"Callable '{qualname}' is not allowed") + + def get_allowed_callable(self, qualname: str) -> Callable[..., Any]: + """Get an allowed callable by name.""" + try: + return self.get_callable(qualname) + except ValueError as e: + # Check if it's a whitelisted callable before trying to import + if ( + qualname in self._callable_builtins + or qualname in self._module_builtins + or any( + qualname.startswith(f"{group['prefix']}{name}") + for group in self._callable_builtins.values() + if group is not None + for name in group["callables"] + ) + ): + # If found in resolver, try importing on demand + for names, import_func in self._import_builtins.items(): + if qualname in names or any(qualname.startswith(prefix) for prefix in names): + callable_obj = import_func(qualname) + self.register_callable(qualname, callable_obj) + return callable_obj + raise ValueError(f"Callable '{qualname}' is not allowed") from e diff --git a/flow360/component/simulation/blueprint/core/statements.py b/flow360/component/simulation/blueprint/core/statements.py new file mode 100644 index 000000000..ecef64c46 --- /dev/null +++ b/flow360/component/simulation/blueprint/core/statements.py @@ -0,0 +1,180 @@ +"""Data models and evaluator functions for single-line Python statements""" + +from typing import Annotated, Literal, Union + +import pydantic as pd + +from .context import EvaluationContext, ReturnValue +from .expressions 
import ExpressionNodeType +from .types import Evaluable + +# Forward declaration of type +StatementNodeType = Annotated[ + # pylint: disable=duplicate-code + Union[ + "AssignNode", + "AugAssignNode", + "IfElseNode", + "ForLoopNode", + "ReturnNode", + "TupleUnpackNode", + ], + pd.Field(discriminator="type"), +] + + +class StatementNode(pd.BaseModel, Evaluable): + """ + Base class for statements (like 'if', 'for', assignments, etc.). + """ + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + raise NotImplementedError + + +class AssignNode(StatementNode): + """ + Represents something like 'result = '. + """ + + type: Literal["Assign"] = "Assign" + target: str + value: ExpressionNodeType + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + context.set( + self.target, self.value.evaluate(context, raise_on_non_evaluable, force_evaluate) + ) + + +class AugAssignNode(StatementNode): + """ + Represents something like 'result += '. + The 'op' is again the operator class name (e.g. 'Add', 'Mult', etc.). + """ + + type: Literal["AugAssign"] = "AugAssign" + target: str + op: str + value: ExpressionNodeType + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + old_val = context.get(self.target) + increment = self.value.evaluate(context, raise_on_non_evaluable, force_evaluate) + if self.op == "Add": + context.set(self.target, old_val + increment) + elif self.op == "Sub": + context.set(self.target, old_val - increment) + elif self.op == "Mult": + context.set(self.target, old_val * increment) + elif self.op == "Div": + context.set(self.target, old_val / increment) + else: + raise ValueError(f"Unsupported augmented assignment operator: {self.op}") + + +class IfElseNode(StatementNode): + """ + Represents an if/else block: + if condition: + + else: + + """ + + type: Literal["IfElse"] = "IfElse" + condition: ExpressionNodeType + body: list["StatementNodeType"] + orelse: list["StatementNodeType"] + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + if self.condition.evaluate(context, raise_on_non_evaluable, force_evaluate): + for stmt in self.body: + stmt.evaluate(context, raise_on_non_evaluable, force_evaluate) + else: + for stmt in self.orelse: + stmt.evaluate(context, raise_on_non_evaluable) + + +class ForLoopNode(StatementNode): + """ + Represents a for loop: + for in : + + """ + + type: Literal["ForLoop"] = "ForLoop" + target: str + iter: ExpressionNodeType + body: list["StatementNodeType"] + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + iterable = self.iter.evaluate(context, raise_on_non_evaluable, force_evaluate) + for item in iterable: + context.set(self.target, item) + for stmt in self.body: + stmt.evaluate(context, raise_on_non_evaluable, force_evaluate) + + +class ReturnNode(StatementNode): + """ + Represents a return statement: return . + We'll raise a custom exception to stop execution in the function. 
+ """ + + type: Literal["Return"] = "Return" + value: ExpressionNodeType + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + val = self.value.evaluate(context, raise_on_non_evaluable, force_evaluate) + raise ReturnValue(val) + + +class TupleUnpackNode(StatementNode): + """Model for tuple unpacking assignments.""" + + type: Literal["TupleUnpack"] = "TupleUnpack" + targets: list[str] + values: list[ExpressionNodeType] + + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> None: + evaluated_values = [ + val.evaluate(context, raise_on_non_evaluable, force_evaluate) for val in self.values + ] + for target, value in zip(self.targets, evaluated_values): + context.set(target, value) diff --git a/flow360/component/simulation/blueprint/core/types.py b/flow360/component/simulation/blueprint/core/types.py new file mode 100644 index 000000000..6e5572ced --- /dev/null +++ b/flow360/component/simulation/blueprint/core/types.py @@ -0,0 +1,42 @@ +"""Shared type definitions for blueprint core submodules""" + +# pylint: disable=too-few-public-methods + +import abc +from enum import Enum +from typing import Any + +from .context import EvaluationContext + + +class Evaluable(metaclass=abc.ABCMeta): + """Base class for all classes that allow evaluation from their symbolic form""" + + @abc.abstractmethod + def evaluate( + self, + context: EvaluationContext, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Any: + """ + Evaluate the expression using the given context. + + Args: + context (EvaluationContext): The context in which to evaluate the expression. + raise_on_non_evaluable (bool): If True, raise an error on non-evaluable symbols; + if False, allow graceful failure or fallback behavior. + force_evaluate (bool): If True, evaluate evaluable objects marked as + non-evaluable, instead of returning their identifier. + Returns: + Any: The evaluated value. + """ + raise NotImplementedError + + +class TargetSyntax(Enum): + """Target syntax enum, Python and""" + + PYTHON = ("python",) + CPP = ("cpp",) + # Possibly other languages in the future if needed... 
diff --git a/flow360/component/simulation/blueprint/utils/__init__.py b/flow360/component/simulation/blueprint/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/flow360/component/simulation/blueprint/utils/operators.py b/flow360/component/simulation/blueprint/utils/operators.py new file mode 100644 index 000000000..89bc9dcdf --- /dev/null +++ b/flow360/component/simulation/blueprint/utils/operators.py @@ -0,0 +1,51 @@ +"""Operator info for the parser module""" + +# pylint: disable=too-few-public-methods + +import operator +from collections.abc import Callable +from typing import Any, Union + + +class OpInfo: + """Class to hold operator information.""" + + def __init__( + self, func: Union[Callable[[Any], Any], Callable[[Any, Any], Any]], symbol: str + ) -> None: + self.func = func + self.symbol = symbol + + def __call__(self, *args: Any) -> Any: + return self.func(*args) + + +UNARY_OPERATORS = { + "UAdd": OpInfo(operator.pos, "+"), + "USub": OpInfo(operator.neg, "-"), +} + +BINARY_OPERATORS = { + # Arithmetic operators + "Add": OpInfo(operator.add, "+"), + "Sub": OpInfo(operator.sub, "-"), + "Mult": OpInfo(operator.mul, "*"), + "Div": OpInfo(operator.truediv, "/"), + "FloorDiv": OpInfo(operator.floordiv, "//"), + "Mod": OpInfo(operator.mod, "%"), + "Pow": OpInfo(operator.pow, "**"), + # Comparison operators + "Eq": OpInfo(operator.eq, "=="), + "NotEq": OpInfo(operator.ne, "!="), + "Lt": OpInfo(operator.lt, "<"), + "LtE": OpInfo(operator.le, "<="), + "Gt": OpInfo(operator.gt, ">"), + "GtE": OpInfo(operator.ge, ">="), + "Is": OpInfo(operator.is_, "is"), + # Bitwise operators + "BitAnd": OpInfo(operator.and_, "&"), + "BitOr": OpInfo(operator.or_, "|"), + "BitXor": OpInfo(operator.xor, "^"), + "LShift": OpInfo(operator.lshift, "<<"), + "RShift": OpInfo(operator.rshift, ">>"), +} diff --git a/flow360/component/simulation/entity_info.py b/flow360/component/simulation/entity_info.py index ba689be26..af7b48773 100644 --- a/flow360/component/simulation/entity_info.py +++ b/flow360/component/simulation/entity_info.py @@ -386,7 +386,7 @@ def get_registry(self, internal_registry, **_) -> EntityRegistry: body_group_tag = self.body_group_tag internal_registry = self._group_entity_by_tag( - "body", self.body_group_tag, registry=internal_registry + "body", body_group_tag, registry=internal_registry ) return internal_registry diff --git a/flow360/component/simulation/framework/base_model.py b/flow360/component/simulation/framework/base_model.py index bf0f6ba1b..a9e7b0592 100644 --- a/flow360/component/simulation/framework/base_model.py +++ b/flow360/component/simulation/framework/base_model.py @@ -5,6 +5,7 @@ import copy import hashlib import json +import re from itertools import chain from typing import Any, List, Literal, Set, get_origin @@ -30,6 +31,9 @@ "private_attribute_entity_type_name", ] +# matches every virtual path part that Pydantic adds for function validators +_FUNCTION_SEGMENT = re.compile(r"^function-") + def _preprocess_nested_list(value, required_by, params, exclude, registry_lookup): new_list = [] @@ -164,7 +168,7 @@ def __pydantic_init_subclass__(cls, **kwargs) -> None: # pylint: disable=fixme # TODO: Remove alias_generator since it is only for translator alias_generator=pd.AliasGenerator( - serialization_alias=snake_to_camel, + alias=snake_to_camel, ), ) @@ -328,26 +332,45 @@ def validate_conditionally_required_field(cls, value, info): @classmethod def populate_ctx_to_error_messages(cls, values, handler, info) -> Any: """ - this validator populates ctx 
messages of fields tagged with "relevant_for" context - it will populate to all child messages + this validator: + - populates ctx messages of fields tagged with "relevant_for" context it will populate to all child messages + - flattens ``loc`` by removing any segment that starts with "function-" """ try: return handler(values) except pd.ValidationError as e: - validation_errors = e.errors() + raw_errors = e.errors() relevant_for = cls._get_field_context(info, "relevant_for") - if relevant_for is not None: - for i, error in enumerate(validation_errors): - ctx = error.get("ctx", {}) - if ctx.get("relevant_for") is None: - # Enforce the relevant_for to be a list for consistency - ctx["relevant_for"] = ( - relevant_for if isinstance(relevant_for, list) else [relevant_for] - ) - validation_errors[i]["ctx"] = ctx + cleaned_errors: list[InitErrorDetails] = [] + + for error in raw_errors: + + new_loc = tuple( + seg + for seg in error["loc"] + if not (isinstance(seg, str) and _FUNCTION_SEGMENT.match(seg)) + ) + + ctx = error.get("ctx", {}) + if relevant_for is not None and ctx.get("relevant_for") is None: + # Enforce the relevant_for to be a list for consistency + ctx["relevant_for"] = ( + relevant_for if isinstance(relevant_for, list) else [relevant_for] + ) + + cleaned_errors.append( + InitErrorDetails( + type=error["type"], + loc=new_loc, + msg=error["msg"], + input=error.get("input"), + ctx=ctx, + ) + ) + raise pd.ValidationError.from_exception_data( - title=cls.__class__.__name__, line_errors=validation_errors - ) + title=cls.__class__.__name__, line_errors=cleaned_errors + ) from None # Note: to_solver architecture will be reworked in favor of splitting the models between # the user-side and solver-side models (see models.py and models_avl.py for reference diff --git a/flow360/component/simulation/framework/param_utils.py b/flow360/component/simulation/framework/param_utils.py index aeb808433..82595ada3 100644 --- a/flow360/component/simulation/framework/param_utils.py +++ b/flow360/component/simulation/framework/param_utils.py @@ -1,6 +1,6 @@ """pre processing and post processing utilities for simulation parameters.""" -from typing import Optional, Union +from typing import Annotated, List, Optional, Union import pydantic as pd @@ -18,8 +18,17 @@ _VolumeEntityBase, ) from flow360.component.simulation.unit_system import LengthType +from flow360.component.simulation.user_code.core.types import ( + VariableContextInfo, + update_global_context, +) from flow360.component.simulation.utils import model_attribute_unlock +VariableContextList = Annotated[ + List[VariableContextInfo], + pd.AfterValidator(update_global_context), +] + class AssetCache(Flow360BaseModel): """ @@ -38,6 +47,9 @@ class AssetCache(Flow360BaseModel): use_geometry_AI: bool = pd.Field( False, description="Flag whether user requested the use of GAI." ) + project_variables: Optional[VariableContextList] = pd.Field( + None, description="List of user variables that are used in all the `Expression` instances." 
+ ) @property def boundaries(self): @@ -49,6 +61,42 @@ def boundaries(self): return self.project_entity_info.get_boundaries() +def find_instances(obj, target_type): + """Recursively find items of target_type within a python object""" + stack = [obj] + seen_ids = set() + results = set() + + while stack: + current = stack.pop() + + obj_id = id(current) + if obj_id in seen_ids: + continue + seen_ids.add(obj_id) + + if isinstance(current, target_type): + results.add(current) + + if isinstance(current, dict): + stack.extend(current.keys()) + stack.extend(current.values()) + + elif isinstance(current, (list, tuple, set, frozenset)): + stack.extend(current) + + elif hasattr(current, "__dict__"): + stack.extend(vars(current).values()) + + elif hasattr(current, "__iter__") and not isinstance(current, (str, bytes)): + try: + stack.extend(iter(current)) + except Exception: # pylint: disable=broad-exception-caught + pass # skip problematic iterables + + return list(results) + + def register_entity_list(model: Flow360BaseModel, registry: EntityRegistry) -> None: """ Registers entities used/occurred in a Flow360BaseModel instance to an EntityRegistry. diff --git a/flow360/component/simulation/framework/updater.py b/flow360/component/simulation/framework/updater.py index 5f2ec66a3..71b1f726e 100644 --- a/flow360/component/simulation/framework/updater.py +++ b/flow360/component/simulation/framework/updater.py @@ -176,7 +176,7 @@ def _to_25_4_1(params_as_dict): return params_as_dict -def _to_25_6_0(params_as_dict): +def _to_25_6_1(params_as_dict): # Known: There can not be velocity_direction both under Inflow AND TotalPressure # Move the velocity_direction under TotalPressure to the Inflow level. @@ -189,6 +189,19 @@ def _to_25_6_0(params_as_dict): if velocity_direction: model["velocity_direction"] = velocity_direction + # What version is this? + if "reference_geometry" in params_as_dict and "area" in params_as_dict["reference_geometry"]: + if ( + params_as_dict["reference_geometry"]["area"] is not None + and "type_name" not in params_as_dict["reference_geometry"]["area"] + ): + params_as_dict["reference_geometry"]["area"]["type_name"] = "number" + + # Add similar logic for step_size in time_stepping + if "time_stepping" in params_as_dict and "step_size" in params_as_dict["time_stepping"]: + if "type_name" not in params_as_dict["time_stepping"]["step_size"]: + params_as_dict["time_stepping"]["step_size"]["type_name"] = "number" + return params_as_dict @@ -200,7 +213,7 @@ def _to_25_6_0(params_as_dict): (Flow360Version("25.2.1"), _to_25_2_1), (Flow360Version("25.2.3"), _to_25_2_3), (Flow360Version("25.4.1"), _to_25_4_1), - (Flow360Version("25.6.0"), _to_25_6_0), + (Flow360Version("25.6.1"), _to_25_6_1), ] # A list of the Python API version tuple with there corresponding updaters. 
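The find_instances helper introduced in param_utils.py walks arbitrarily nested dicts, sequences, and object attributes and returns the unique matches of the requested type. A short self-contained illustration; the nested data below is invented purely for demonstration.

# Illustration of find_instances on an ad-hoc nested structure.
from flow360.component.simulation.framework.param_utils import find_instances

nested = {"coeffs": [1, 2.5, (3, "alpha")], "meta": {"area": 4.0}}
print(sorted(find_instances(nested, float)))  # [2.5, 4.0]
print(sorted(find_instances(nested, str)))    # ['alpha', 'area', 'coeffs', 'meta'] (dict keys are traversed too)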
diff --git a/flow360/component/simulation/outputs/output_fields.py b/flow360/component/simulation/outputs/output_fields.py index 6a2c11d7c..cb1795445 100644 --- a/flow360/component/simulation/outputs/output_fields.py +++ b/flow360/component/simulation/outputs/output_fields.py @@ -395,7 +395,7 @@ def _distribute_shared_output_fields(solver_values: dict, item_names: str): return shared_fields = solver_values.pop("output_fields") if solver_values[item_names] is not None: - for name in solver_values[item_names].names(): + for name in solver_values[item_names].user_variables(): item = solver_values[item_names][name] for field in shared_fields: if item.output_fields is None: diff --git a/flow360/component/simulation/outputs/outputs.py b/flow360/component/simulation/outputs/outputs.py index 1fb58828d..14d9521f0 100644 --- a/flow360/component/simulation/outputs/outputs.py +++ b/flow360/component/simulation/outputs/outputs.py @@ -36,7 +36,11 @@ GhostSurface, Surface, ) -from flow360.component.simulation.unit_system import LengthType +from flow360.component.simulation.unit_system import LengthType, unit_system_manager +from flow360.component.simulation.user_code.core.types import ( + SolverVariable, + UserVariable, +) from flow360.component.simulation.validation.validation_context import ( ALL, CASE, @@ -119,6 +123,33 @@ def _validate_non_liquid_output_fields(cls, value: UniqueItemList): ) return value + @pd.field_validator("output_fields", mode="before") + @classmethod + def _convert_solver_variables_as_user_variables(cls, value): + # Handle both dict/list (deserialization) and UniqueItemList (python object) + def solver_variable_to_user_variable(item): + if isinstance(item, SolverVariable): + if unit_system_manager.current is None: + raise ValueError( + f"Solver variable {item.name} cannot be used without a unit system." + ) + unit_system_name = unit_system_manager.current.name + name = item.name.split(".")[-1] if "." in item.name else item.name + return UserVariable(name=f"{name}_{unit_system_name}", value=item) + return item + + # If input is a dict (from deserialization so no SolverVariable expected) + if isinstance(value, dict): + return value + # If input is a list (from Python mode) + if isinstance(value, list): + return [solver_variable_to_user_variable(item) for item in value] + # If input is a UniqueItemList (python object) + if hasattr(value, "items") and isinstance(value.items, list): + value.items = [solver_variable_to_user_variable(item) for item in value.items] + return value + return value + class _AnimationSettings(_OutputBase): """ @@ -193,7 +224,7 @@ class SurfaceOutput(_AnimationAndFileFormatSettings): + "Will choose the value of the last instance of this option of the same output type " + "(:class:`SurfaceOutput` or :class:`TimeAverageSurfaceOutput`) in the output list.", ) - output_fields: UniqueItemList[Union[SurfaceFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[SurfaceFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. Including :ref:`universal output variables`," + " :ref:`variables specific to SurfaceOutput` and :class:`UserDefinedField`." ) @@ -260,7 +291,7 @@ class VolumeOutput(_AnimationAndFileFormatSettings): """ name: Optional[str] = pd.Field("Volume output", description="Name of the `VolumeOutput`.") - output_fields: UniqueItemList[Union[VolumeFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[VolumeFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. 
Including :ref:`universal output variables`," " :ref:`variables specific to VolumeOutput`" " and :class:`UserDefinedField`." @@ -328,7 +359,7 @@ class SliceOutput(_AnimationAndFileFormatSettings): alias="slices", description="List of output :class:`~flow360.Slice` entities.", ) - output_fields: UniqueItemList[Union[SliceFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[SliceFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. Including :ref:`universal output variables`," " :ref:`variables specific to SliceOutput`" " and :class:`UserDefinedField`." @@ -414,7 +445,7 @@ class IsosurfaceOutput(_AnimationAndFileFormatSettings): alias="isosurfaces", description="List of :class:`~flow360.Isosurface` entities.", ) - output_fields: UniqueItemList[Union[CommonFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[CommonFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. Including " ":ref:`universal output variables` and :class:`UserDefinedField`." ) @@ -451,7 +482,7 @@ class SurfaceIntegralOutput(_OutputBase): alias="surfaces", description="List of boundaries where the surface integral will be calculated.", ) - output_fields: UniqueItemList[str] = pd.Field( + output_fields: UniqueItemList[Union[str, UserVariable]] = pd.Field( description="List of output variables, only the :class:`UserDefinedField` is allowed." ) output_type: Literal["SurfaceIntegralOutput"] = pd.Field("SurfaceIntegralOutput", frozen=True) @@ -515,14 +546,14 @@ class ProbeOutput(_OutputBase): + "monitor group. :class:`~flow360.PointArray` is used to " + "define monitored points along a line.", ) - output_fields: UniqueItemList[Union[CommonFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[CommonFieldNames, str, UserVariable]] = pd.Field( description="List of output fields. Including :ref:`universal output variables`" " and :class:`UserDefinedField`." ) output_type: Literal["ProbeOutput"] = pd.Field("ProbeOutput", frozen=True) -class SurfaceProbeOutput(Flow360BaseModel): +class SurfaceProbeOutput(_OutputBase): """ :class:`SurfaceProbeOutput` class for setting surface output data probed at monitor points. The specified monitor point will be projected to the :py:attr:`~SurfaceProbeOutput.target_surfaces` @@ -579,7 +610,7 @@ class SurfaceProbeOutput(Flow360BaseModel): + "entities belonging to this monitor group." ) - output_fields: UniqueItemList[Union[SurfaceFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[SurfaceFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. Including :ref:`universal output variables`," " :ref:`variables specific to SurfaceOutput` and :class:`UserDefinedField`." ) @@ -608,7 +639,7 @@ class SurfaceSliceOutput(_AnimationAndFileFormatSettings): output_format: Literal["paraview"] = pd.Field(default="paraview") - output_fields: UniqueItemList[Union[SurfaceFieldNames, str]] = pd.Field( + output_fields: UniqueItemList[Union[SurfaceFieldNames, str, UserVariable]] = pd.Field( description="List of output variables. Including :ref:`universal output variables`," " :ref:`variables specific to SurfaceOutput` and :class:`UserDefinedField`." 
) diff --git a/flow360/component/simulation/primitives.py b/flow360/component/simulation/primitives.py index aeca8090d..8619beb27 100644 --- a/flow360/component/simulation/primitives.py +++ b/flow360/component/simulation/primitives.py @@ -21,6 +21,7 @@ ) from flow360.component.simulation.framework.unique_list import UniqueStringList from flow360.component.simulation.unit_system import AngleType, AreaType, LengthType +from flow360.component.simulation.user_code.core.types import ValueOrExpression from flow360.component.simulation.utils import model_attribute_unlock from flow360.component.types import Axis @@ -87,7 +88,7 @@ class ReferenceGeometry(Flow360BaseModel): moment_length: Optional[Union[LengthType.Positive, LengthType.PositiveVector]] = pd.Field( None, description="The x, y, z component-wise moment reference lengths." ) - area: Optional[AreaType.Positive] = pd.Field( + area: Optional[ValueOrExpression[AreaType.Positive]] = pd.Field( None, description="The reference area of the geometry." ) diff --git a/flow360/component/simulation/services.py b/flow360/component/simulation/services.py index 1d7489ca4..ce48d7ac7 100644 --- a/flow360/component/simulation/services.py +++ b/flow360/component/simulation/services.py @@ -1,13 +1,17 @@ """Simulation services module.""" -# pylint: disable=duplicate-code +# pylint: disable=duplicate-code, too-many-lines import json import os from enum import Enum +from numbers import Number from typing import Any, Collection, Dict, Literal, Optional, Tuple, Union +import numpy as np import pydantic as pd +from unyt import unyt_array +# Required for correct global scope initialization from flow360.component.simulation.exposed_units import supported_units_by_front_end from flow360.component.simulation.framework.multi_constructor_model_base import ( parse_model_dict, @@ -25,7 +29,9 @@ from flow360.component.simulation.models.volume_models import ( # pylint: disable=unused-import BETDisk, ) -from flow360.component.simulation.operating_condition.operating_condition import ( # pylint: disable=unused-import + +# pylint: disable=unused-import +from flow360.component.simulation.operating_condition.operating_condition import ( AerospaceCondition, GenericReferenceCondition, ThermalState, @@ -37,6 +43,8 @@ ReferenceGeometry, SimulationParams, ) + +# Required for correct global scope initialization from flow360.component.simulation.translator.solver_translator import get_solver_json from flow360.component.simulation.translator.surface_meshing_translator import ( get_surface_meshing_json, @@ -56,6 +64,7 @@ u, unit_system_manager, ) +from flow360.component.simulation.user_code.core.types import Expression, UserVariable from flow360.component.simulation.utils import model_attribute_unlock from flow360.component.simulation.validation.validation_context import ( ALL, @@ -70,6 +79,9 @@ from flow360.plugins.report.report import get_default_report_summary_template from flow360.version import __version__ +# Required for correct global scope initialization + + unit_system_map = { "SI": SI_unit_system, "CGS": CGS_unit_system, @@ -363,6 +375,8 @@ def validate_model( updated_param_as_dict = parse_model_dict(updated_param_as_dict, globals()) + SimulationParams.initialize_variable_space(updated_param_as_dict) + additional_info = ParamsValidationInfo(param_as_dict=updated_param_as_dict) with ValidationContext(levels=validation_levels_to_use, info=additional_info): validated_param = SimulationParams(file_content=updated_param_as_dict) @@ -409,7 +423,9 @@ def 
clean_unrelated_setting_from_params_dict(params: dict, root_item_type: str) return params -def handle_generic_exception(err: Exception, validation_errors: Optional[list]) -> list: +def handle_generic_exception( + err: Exception, validation_errors: Optional[list], loc_prefix: Optional[list[str]] = None +) -> list: """ Handles generic exceptions during validation, adding to validation errors. @@ -419,6 +435,8 @@ def handle_generic_exception(err: Exception, validation_errors: Optional[list]) The exception caught during validation. validation_errors : list or None Current list of validation errors, may be None. + loc_prefix : list or None + Prefix of the location of the generic error to help locate the issue Returns ------- @@ -431,7 +449,7 @@ def handle_generic_exception(err: Exception, validation_errors: Optional[list]) validation_errors.append( { "type": err.__class__.__name__.lower().replace("error", "_error"), - "loc": ["unknown"], + "loc": ["unknown"] if loc_prefix is None else loc_prefix, "msg": str(err), "ctx": {}, } @@ -779,6 +797,65 @@ def update_simulation_json(*, params_as_dict: dict, target_python_api_version: s return updated_params_as_dict, errors +# pylint: disable=too-many-branches +def validate_expression(variables: list[dict], expressions: list[str]): + """ + Validate all given expressions using the specified variable space (which is also validated) + """ + errors = [] + values = [] + units = [] + + # Populate variable scope + for i, variable in enumerate(variables): + loc_hint = ["variables", str(i)] + try: + variable = UserVariable(name=variable["name"], value=variable["value"]) + if variable and isinstance(variable.value, Expression): + _ = variable.value.evaluate(raise_on_non_evaluable=False) + except pd.ValidationError as err: + errors.extend(err.errors()) + except Exception as err: # pylint: disable=broad-exception-caught + handle_generic_exception(err, errors, loc_hint) + + for i, expression in enumerate(expressions): + loc_hint = ["expressions", str(i)] + value = None + unit = None + try: + expression_object = Expression(expression=expression) + result = expression_object.evaluate(raise_on_non_evaluable=False) + if isinstance(result, (list, np.ndarray)): + if np.isnan(result).all(): + pass + elif isinstance(result, Number) and np.isnan(result): + pass + elif isinstance(result, Number): + value = result + elif isinstance(result, unyt_array): + if result.size == 1: + value = float(result.value) + else: + value = tuple(result.value.tolist()) + unit = str(result.units.expr) + elif isinstance(result, np.ndarray): + if result.size == 1: + value = float(result[0]) + else: + value = tuple(result.tolist()) + + # Test symbolically + expression_object.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + except pd.ValidationError as err: + errors.extend(err.errors()) + except Exception as err: # pylint: disable=broad-exception-caught + handle_generic_exception(err, errors, loc_hint) + values.append(value) + units.append(unit) + + return errors, values, units + + def _serialize_unit_in_dict(data): """ Recursively serialize unit type data in a dictionary or list. 
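A usage sketch for the `validate_expression` service added above; the payload shapes and the specific variable/expression strings are illustrative assumptions, not part of this change.

# Hypothetical inputs: one user variable plus two expressions to check against it.
from flow360.component.simulation.services import validate_expression

variables = [{"name": "area_scale", "value": 2.0}]
expressions = [
    "area_scale * 3",     # client-time evaluable: a numeric value is expected back
    "solution.Mach + 1",  # solver-only variable: no client-side value is expected
]

errors, values, units = validate_expression(variables=variables, expressions=expressions)

# errors: pydantic-style error dicts; generic failures are prefixed with a
#         ["variables" | "expressions", index] location via handle_generic_exception.
# values: evaluated results aligned with `expressions` (None where nothing numeric
#         could be produced on the client side).
# units:  unit strings aligned with `expressions` (None for dimensionless results).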
diff --git a/flow360/component/simulation/simulation_params.py b/flow360/component/simulation/simulation_params.py index 4dd078fd1..e068c561b 100644 --- a/flow360/component/simulation/simulation_params.py +++ b/flow360/component/simulation/simulation_params.py @@ -4,7 +4,7 @@ from __future__ import annotations -from typing import Annotated, List, Optional, Union +from typing import Annotated, Iterable, List, Optional, Union import pydantic as pd import unyt as u @@ -69,6 +69,7 @@ unit_system_manager, unyt_quantity, ) +from flow360.component.simulation.user_code.core.types import UserVariable from flow360.component.simulation.user_defined_dynamics.user_defined_dynamics import ( UserDefinedDynamic, ) @@ -212,6 +213,7 @@ def _init_with_unit_context(self, **kwargs): """ # When treating dicts the updater is skipped. kwargs = _ParamModelBase._init_check_unit_system(**kwargs) + super().__init__(unit_system=unit_system_manager.current, **kwargs) # pylint: disable=super-init-not-called @@ -376,11 +378,33 @@ def convert_unit( converted = value.in_base(unit_system=target_system) return converted + # We have no way forcing validator call order so this is a workaround + @classmethod + def initialize_variable_space(cls, value: dict): + """Load all user variables from private attributes when a simulation params object is initialized""" + if "private_attribute_asset_cache" not in value.keys(): + return value + asset_cache: dict = value["private_attribute_asset_cache"] + if "project_variables" not in asset_cache.keys(): + return value + if isinstance(asset_cache["project_variables"], Iterable): + for variable_dict in asset_cache["project_variables"]: + value_or_expression = { + key: value + for key, value in variable_dict["value"].items() + if key != "postProcessing" + } + UserVariable( + name=variable_dict["name"], + value=value_or_expression, + ) + return value + # pylint: disable=no-self-argument @pd.field_validator("models", mode="after") @classmethod def apply_default_fluid_settings(cls, v): - """apply default Fluid() settings if not found in models""" + """Apply default Fluid() settings if not found in models""" if v is None: v = [] assert isinstance(v, list) diff --git a/flow360/component/simulation/time_stepping/time_stepping.py b/flow360/component/simulation/time_stepping/time_stepping.py index 78464c69f..72e1f09da 100644 --- a/flow360/component/simulation/time_stepping/time_stepping.py +++ b/flow360/component/simulation/time_stepping/time_stepping.py @@ -6,6 +6,7 @@ from flow360.component.simulation.framework.base_model import Flow360BaseModel from flow360.component.simulation.unit_system import TimeType +from flow360.component.simulation.user_code.core.types import ValueOrExpression def _apply_default_to_none(original, default): @@ -174,7 +175,9 @@ class Unsteady(Flow360BaseModel): ) steps: pd.PositiveInt = pd.Field(description="Number of physical steps.") # pylint: disable=no-member - step_size: TimeType.Positive = pd.Field(description="Time step size in physical step marching,") + step_size: ValueOrExpression[TimeType.Positive] = pd.Field( + description="Time step size in physical step marching," + ) # pylint: disable=duplicate-code CFL: Union[RampCFL, AdaptiveCFL] = pd.Field( default=AdaptiveCFL.default_unsteady(), diff --git a/flow360/component/simulation/translator/solver_translator.py b/flow360/component/simulation/translator/solver_translator.py index 53f71e5be..53327ecb1 100644 --- a/flow360/component/simulation/translator/solver_translator.py +++ 
b/flow360/component/simulation/translator/solver_translator.py @@ -1,8 +1,12 @@ """Flow360 solver setting parameter translator.""" # pylint: disable=too-many-lines +from numbers import Number from typing import Type, Union +import numpy as np +import unyt as u + from flow360.component.simulation.conversion import LIQUID_IMAGINARY_FREESTREAM_MACH from flow360.component.simulation.framework.entity_base import EntityList from flow360.component.simulation.models.material import Sutherland @@ -73,18 +77,24 @@ from flow360.component.simulation.primitives import Box, SurfacePair from flow360.component.simulation.simulation_params import SimulationParams from flow360.component.simulation.time_stepping.time_stepping import Steady, Unsteady +from flow360.component.simulation.translator.user_expression_utils import ( + udf_prepending_code, +) from flow360.component.simulation.translator.utils import ( _get_key_name, convert_tuples_to_lists, get_global_setting_from_first_instance, has_instance_in_list, + inline_expressions_in_dict, preprocess_input, remove_units_in_dict, replace_dict_key, translate_setting_and_apply_to_all_entities, + translate_value_or_expression_object, update_dict_recursively, ) from flow360.component.simulation.unit_system import LengthType +from flow360.component.simulation.user_code.core.types import Expression, UserVariable from flow360.component.simulation.utils import ( is_exact_instance, is_instance_of_type_in_union, @@ -236,11 +246,22 @@ def translate_output_fields( ], ): """Get output fields""" - return {"outputFields": append_component_to_output_fields(output_model.output_fields.items)} + output_fields = [] + + for item in append_component_to_output_fields(output_model.output_fields.items): + output_fields.append(item) + + for output_field in output_model.output_fields.items: + if isinstance(output_field, UserVariable): + # Remove the UserVariable object and add its name + output_fields.append(output_field.name) + # Filter out the UserVariable Dicts + output_fields = [item for item in output_fields if isinstance(item, str)] + return {"outputFields": output_fields} def surface_probe_setting_translation_func(entity: SurfaceProbeOutput): - """Translate non-entitties part of SurfaceProbeOutput""" + """Translate non-entities part of SurfaceProbeOutput""" dict_with_merged_output_fields = monitor_translator(entity) dict_with_merged_output_fields["surfacePatches"] = [ surface.full_name for surface in entity.target_surfaces.stored_entities @@ -354,13 +375,24 @@ def translate_volume_output( is_average=volume_output_class is TimeAverageVolumeOutput, ) # Get outputFields + output_fields = [] + + output_fields = append_component_to_output_fields( + get_global_setting_from_first_instance( + output_params, volume_output_class, "output_fields" + ).model_dump()["items"] + ) + + for output_field in get_global_setting_from_first_instance( + output_params, volume_output_class, "output_fields" + ).items: + if isinstance(output_field, UserVariable): + output_fields.append(output_field.name) + # Filter out the UserVariable Dicts + output_fields = [item for item in output_fields if isinstance(item, str)] volume_output.update( { - "outputFields": append_component_to_output_fields( - get_global_setting_from_first_instance( - output_params, volume_output_class, "output_fields" - ).model_dump()["items"] - ), + "outputFields": output_fields, } ) return volume_output @@ -511,7 +543,95 @@ def translate_acoustic_output(output_params: list): return None +def user_variable_to_udf(variable: UserVariable, 
input_params: SimulationParams): + # pylint:disable=too-many-statements + """Convert user variable to UDF""" + if not isinstance(variable.value, Expression): + # Likely number of unyt object + # We should add validator for this for output fields. + raise ValueError("Did not find expression in user variable") + + numerical_value = variable.value.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + + is_constant = False + if isinstance(numerical_value, Number) and not np.isnan(numerical_value): # not NaN + is_constant = True + elif isinstance(numerical_value, u.unyt_quantity) and not np.isnan(numerical_value.value): + is_constant = True + elif isinstance(numerical_value, u.unyt_array) and not np.any(np.isnan(numerical_value.value)): + is_constant = True + + if is_constant: + raise ValueError("Constant value found in user variable.") + + def _compute_coefficient_and_offset(source_unit: u.Unit, target_unit: u.Unit): + y2 = (2.0 * target_unit).in_units(source_unit).value + y1 = (1.0 * target_unit).in_units(source_unit).value + x2 = 2.0 + x1 = 1.0 + + coefficient = (y2 - y1) / (x2 - x1) + offset = y1 / coefficient - x1 + + return coefficient, offset + + def _prepare_prepending_code(expression: Expression): + prepending_code = [] + for name in expression.solver_variable_names(): + if not udf_prepending_code.get(name): + continue + prepending_code.append(udf_prepending_code[name]) + prepending_code = "".join(prepending_code) + return prepending_code + + expression: Expression = variable.value + + requested_unit: Union[u.Unit, None] = expression.get_output_units(input_params=input_params) + if requested_unit is None: + # Number constant output requested + coefficient = 1 + offset = 0 + else: + flow360_unit_system = input_params.flow360_unit_system + # Note: Effectively assuming that all the solver vars uses radians and also the expressions expect radians + flow360_unit_system["angle"] = u.rad # pylint:disable=no-member + flow360_unit = flow360_unit_system[requested_unit.dimensions] + coefficient, offset = _compute_coefficient_and_offset( + source_unit=requested_unit, target_unit=flow360_unit + ) + + expression_length = expression.length + prepending_code = _prepare_prepending_code(expression=expression) + + if expression_length == 1: + expression = expression.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + if offset != 0: + expression = (expression + offset) * coefficient + else: + expression = expression * coefficient + expression = expression.to_solver_code(params=input_params) + return UserDefinedField( + name=variable.name, expression=f"{prepending_code}{variable.name} = " + expression + ";" + ) + + # Vector output requested + expression = [ + expression[i].evaluate(raise_on_non_evaluable=False, force_evaluate=False) + for i in range(expression_length) + ] + if offset != 0: + expression = [(item + offset) * coefficient for item in expression] + else: + expression = [item * coefficient for item in expression] + expression = [item.to_solver_code(params=input_params) for item in expression] + expression = [f"{variable.name}[{i}] = " + item for i, item in enumerate(expression)] + return UserDefinedField( + name=variable.name, expression=prepending_code + "; ".join(expression) + ";" + ) + + def process_output_fields_for_udf(input_params: SimulationParams): + # pylint:disable=too-many-branches """ Process all output fields from different output types and generate additional UserDefinedFields for dimensioned fields. 
@@ -549,7 +669,18 @@ def process_output_fields_for_udf(input_params: SimulationParams): if udf_expression: generated_udfs.append(UserDefinedField(name=field_name, expression=udf_expression)) - return generated_udfs + # UserVariable handling: + user_variable_udfs = {} + if input_params.outputs: + for output in input_params.outputs: + if not hasattr(output, "output_fields") or not output.output_fields: + continue + for output_field in output.output_fields.items: + if not isinstance(output_field, UserVariable): + continue + udf_from_user_variable = user_variable_to_udf(output_field, input_params) + user_variable_udfs[udf_from_user_variable.name] = udf_from_user_variable + return generated_udfs + list(user_variable_udfs.values()) def translate_streamline_output(output_params: list): @@ -1032,7 +1163,10 @@ def get_solver_json( translated = {} ##:: Step 1: Get geometry: if input_params.reference_geometry: - geometry = remove_units_in_dict(dump_dict(input_params.reference_geometry)) + geometry = inline_expressions_in_dict( + dump_dict(input_params.reference_geometry), input_params + ) + geometry = remove_units_in_dict(geometry) translated["geometry"] = {} if input_params.reference_geometry.area is not None: translated["geometry"]["refArea"] = geometry["area"] @@ -1087,7 +1221,7 @@ def get_solver_json( "physicalSteps": ts.steps, "orderOfAccuracy": ts.order_of_accuracy, "maxPseudoSteps": ts.max_pseudo_steps, - "timeStepSize": ts.step_size.value.item(), + "timeStepSize": translate_value_or_expression_object(ts.step_size, input_params), } elif isinstance(ts, Steady): translated["timeStepping"] = { diff --git a/flow360/component/simulation/translator/user_expression_utils.py b/flow360/component/simulation/translator/user_expression_utils.py new file mode 100644 index 000000000..0c49e98b0 --- /dev/null +++ b/flow360/component/simulation/translator/user_expression_utils.py @@ -0,0 +1,113 @@ +"""Utilities for user expression translation.""" + +import numpy as np + +udf_prepending_code = { + "solution.Cp": "double ___Cp = (primitiveVars[4] - pressureFreestream) / (0.5 * MachRef * MachRef);", + "solution.Cpt": "double ___MachTmp = sqrt(primitiveVars[1] * primitiveVars[1] + " + + "primitiveVars[2] * primitiveVars[2] + primitiveVars[3] * primitiveVars[3]) / " + + "sqrt(1.4 * primitiveVars[4] / primitiveVars[0]);" + + "double ___Cpt = (1.4 * primitiveVars[4] * pow(1.0 + (1.4 - 1.0) / 2. * ___MachTmp * ___MachTmp," + + "1.4 / (1.4 - 1.0)) - pow(1.0 + (1.4 - 1.0) / 2. 
* MachRef * MachRef," + + "1.4 / (1.4 - 1.0))) / (0.5 * 1.4 * MachRef * MachRef);", + "solution.grad_density": "double ___grad_density[3]; ___grad_density[0] = gradPrimitive[0][0];" + + "___grad_density[1] = gradPrimitive[0][1];" + + "___grad_density[2] = gradPrimitive[0][2];", + "solution.grad_u": "double ___grad_u[3];" + + "___grad_u[0] = gradPrimitive[1][0] * velocityScale;" + + "___grad_u[1] = gradPrimitive[1][1] * velocityScale;" + + "___grad_u[2] = gradPrimitive[1][2] * velocityScale;", + "solution.grad_v": "double ___grad_v[3];" + + "___grad_v[0] = gradPrimitive[2][0] * velocityScale;" + + "___grad_v[1] = gradPrimitive[2][1] * velocityScale;" + + "___grad_v[2] = gradPrimitive[2][2] * velocityScale;", + "solution.grad_w": "double ___grad_w[3];" + + "___grad_w[0] = gradPrimitive[3][0] * velocityScale;" + + "___grad_w[1] = gradPrimitive[3][1] * velocityScale;" + + "___grad_w[2] = gradPrimitive[3][2] * velocityScale;", + "solution.grad_pressure": "double ___grad_pressure[3];" + + "___grad_pressure[0] = gradPrimitive[4][0];" + + "___grad_pressure[1] = gradPrimitive[4][1];" + + "___grad_pressure[2] = gradPrimitive[4][2];", + "solution.Mach": "double ___Mach;" + + "___Mach = usingLiquidAsMaterial ? 0 : " + + "sqrt(primitiveVars[1] * primitiveVars[1] + " + + "primitiveVars[2] * primitiveVars[2] + " + + "primitiveVars[3] * primitiveVars[3]) / " + + "sqrt(1.4 * primitiveVars[4] / primitiveVars[0]);", + "solution.mut_ratio": "double ___mut_ratio; ___mut_ratio = mut / mu;", + "solution.nu_hat": "double ___nu_hat;___nu_hat = solutionTurbulence * velocityScale;", + "solution.turbulence_kinetic_energy": "double ___turbulence_kinetic_energy;" + "___turbulence_kinetic_energy = solutionTurbulence[0] * pow(velocityScale, 2);", + "solution.specific_rate_of_dissipation": "double ___specific_rate_of_dissipation;" + + "___specific_rate_of_dissipation = solutionTurbulence[1] * velocityScale;", + "solution.velocity": "double ___velocity[3];" + + "___velocity[0] = primitiveVars[1] * velocityScale;" + + "___velocity[1] = primitiveVars[2] * velocityScale;" + + "___velocity[2] = primitiveVars[3] * velocityScale;", + "solution.qcriterion": "double ___qcriterion;" + + "double ___ux = gradPrimitive[1][0];" + + "double ___uy = gradPrimitive[1][1];" + + "double ___uz = gradPrimitive[1][2];" + + "double ___vx = gradPrimitive[2][0];" + + "double ___vy = gradPrimitive[2][1];" + + "double ___vz = gradPrimitive[2][2];" + + "double ___wx = gradPrimitive[3][0];" + + "double ___wy = gradPrimitive[3][1];" + + "double ___wz = gradPrimitive[3][2];" + + "double ___str11 = ___ux;" + + "double ___str22 = ___vy;" + + "double ___str33 = ___wz;" + + "double ___str12 = 0.5 * (___uy + ___vx);" + + "double ___str13 = 0.5 * (___uz + ___wx);" + + "double ___str23 = 0.5 * (___vz + ___wy);" + + "double ___str_norm = ___str11 * ___str11 + ___str22 * ___str22 + ___str33 * ___str33 + " + + "2 * (___str12 * ___str12) + 2 * (___str13 * ___str13) + 2 * (___str23 * ___str23);" + + "double ___omg12 = 0.5 * (___uy - ___vx);" + + "double ___omg13 = 0.5 * (___uz - ___wx);" + + "double ___omg23 = 0.5 * (___vz - ___wy);" + + "double ___omg_norm = 2 * (___omg12 * ___omg12) + 2 * (___omg13 * ___omg13) + 2 * (___omg23 * ___omg23);" + + "___qcriterion = 0.5 * (___omg_norm - ___str_norm) * (velocityScale * velocityScale);", + "solution.entropy": "double ___entropy;" + + "___entropy = log(primitiveVars[4] / (1.0 / 1.4) / pow(primitiveVars[0], 1.4));", + "solution.temperature": "double ___temperature;" + + f"double ___epsilon = {np.finfo(np.float64).eps};" + 
"___temperature = (primitiveVars[0] < ___epsilon && HeatEquation_solution != nullptr) ? " + "HeatEquation_solution[0] : primitiveVars[4] / (primitiveVars[0] * (1.0 / 1.4));", + "solution.vorticity": "double ___vorticity[3];" + + "___vorticity[0] = (gradPrimitive[3][1] - gradPrimitive[2][2]) * velocityScale;" + + "___vorticity[1] = (gradPrimitive[1][2] - gradPrimitive[3][0]) * velocityScale;" + + "___vorticity[2] = (gradPrimitive[2][0] - gradPrimitive[1][1]) * velocityScale;", + "solution.CfVec": "double ___CfVec[3]; for (int i = 0; i < 3; i++)" + + "{___CfVec[i] = wallShearStress[i] / (0.5 * MachRef * MachRef);}", + "solution.Cf": "double ___Cf;" + + "___Cf = magnitude(wallShearStress) / (0.5 * MachRef * MachRef);", + "solution.node_forces_per_unit_area": "double ___node_forces_per_unit_area[3];" + + "double ___normalMag = magnitude(nodeNormals);" + + "for (int i = 0; i < 3; i++){___node_forces_per_unit_area[i] = " + + "((primitiveVars[4] - pressureFreestream) * nodeNormals[i] / ___normalMag + wallViscousStress[i])" + + " * (velocityScale * velocityScale);}", + "solution.heat_transfer_coefficient_static_temperature": "double ___heat_transfer_coefficient_static_temperature;" + + "double ___temperatureTmp = " + + "primitiveVars[4] / (primitiveVars[0] * 1.0 / 1.4);" + + f"double ___epsilon = {np.finfo(np.float64).eps};" + + "double ___temperatureSafeDivide = (___temperatureTmp - 1.0 < 0) ? " + + "___temperatureTmp - 1.0 - ___epsilon : " + + "___temperatureTmp - 1.0 + ___epsilon;" + + "___heat_transfer_coefficient_static_temperature = " + + "abs(___temperatureTmp - 1.0) > ___epsilon ? " + + "- wallHeatFlux / ___temperatureSafeDivide : 1.0 / ___epsilon;", + "solution.heat_transfer_coefficient_total_temperature": "double ___heat_transfer_coefficient_total_temperature;" + + "double ___temperatureTmp = " + + "primitiveVars[4] / (primitiveVars[0] * 1.0 / 1.4);" + + "double ___temperatureTotal = 1.0 + (1.4 - 1.0) / 2.0 * MachRef * MachRef;" + + f"double ___epsilon = {np.finfo(np.float64).eps};" + + "double ___temperatureSafeDivide = (___temperatureTmp - ___temperatureTotal < 0) ? " + + "___temperatureTmp - ___temperatureTotal - ___epsilon : " + + "___temperatureTmp - ___temperatureTotal + ___epsilon;" + + "double ___heat_transfer_coefficient_total_temperature = " + + "abs(___temperatureTmp - ___temperatureTotal) > ___epsilon ? 
" + + "___temperatureTotal = - wallHeatFlux / ___temperatureSafeDivide : 1.0 / ___epsilon;", + "solution.wall_shear_stress_magnitude": "double ___wall_shear_stress_magnitude;" + + "___wall_shear_stress_magnitude = magnitude(wallShearStress);", +} diff --git a/flow360/component/simulation/translator/utils.py b/flow360/component/simulation/translator/utils.py index cce76f33c..ccee88def 100644 --- a/flow360/component/simulation/translator/utils.py +++ b/flow360/component/simulation/translator/utils.py @@ -7,6 +7,9 @@ from collections import OrderedDict from typing import Union +import numpy as np +import unyt as u + from flow360.component.simulation.framework.entity_base import EntityBase, EntityList from flow360.component.simulation.framework.unique_list import UniqueItemList from flow360.component.simulation.primitives import ( @@ -15,6 +18,7 @@ ) from flow360.component.simulation.simulation_params import SimulationParams from flow360.component.simulation.unit_system import LengthType +from flow360.component.simulation.user_code.core.types import Expression from flow360.component.simulation.utils import is_exact_instance @@ -116,10 +120,13 @@ def convert_tuples_to_lists(input_dict): def remove_units_in_dict(input_dict): """Remove units from a dimensioned value.""" - unit_keys = {"value", "units"} + + def _is_unyt_or_unyt_like_obj(value): + return "value" in value.keys() and "units" in value.keys() + if isinstance(input_dict, dict): new_dict = {} - if input_dict.keys() == unit_keys: + if _is_unyt_or_unyt_like_obj(input_dict): new_dict = input_dict["value"] if input_dict["units"].startswith("flow360_") is False: raise ValueError( @@ -127,7 +134,7 @@ def remove_units_in_dict(input_dict): ) return new_dict for key, value in input_dict.items(): - if isinstance(value, dict) and value.keys() == unit_keys: + if isinstance(value, dict) and _is_unyt_or_unyt_like_obj(value): if value["units"].startswith("flow360_") is False: raise ValueError( f"[Internal Error] Unit {value['units']} is not non-dimensionalized." @@ -141,6 +148,50 @@ def remove_units_in_dict(input_dict): return input_dict +def translate_value_or_expression_object( + obj: Union[Expression, u.unyt_quantity, u.unyt_array], input_params: SimulationParams +): + """Translate for an ValueOrExpression object""" + if isinstance(obj, Expression): + # Only allowing client-time evaluable expressions + evaluated = obj.evaluate(raise_on_non_evaluable=True) + converted = input_params.convert_unit(evaluated, "flow360").v.item() + return converted + # Non dimensionalized unyt objects + return obj.value.item() + + +def inline_expressions_in_dict(input_dict, input_params): + """Inline all client-time evaluable expressions in the provided dict to their evaluated values""" + if isinstance(input_dict, dict): + new_dict = {} + if "expression" in input_dict.keys(): + expression = Expression(expression=input_dict["expression"]) + evaluated = expression.evaluate(raise_on_non_evaluable=False) + converted = input_params.convert_unit(evaluated, "flow360").v + new_dict = converted + return new_dict + for key, value in input_dict.items(): + # For number-type fields the schema should match dimensioned unit fields + # so remove_units_in_dict should handle them correctly... 
+ if isinstance(value, dict) and "expression" in value.keys(): + expression = Expression(expression=value["expression"]) + evaluated = expression.evaluate(raise_on_non_evaluable=False) + converted = input_params.convert_unit(evaluated, "flow360").v + if isinstance(converted, np.ndarray): + if converted.ndim == 0: + converted = float(converted) + else: + converted = converted.tolist() + new_dict[key] = converted + else: + new_dict[key] = inline_expressions_in_dict(value, input_params) + return new_dict + if isinstance(input_dict, list): + return [inline_expressions_in_dict(item, input_params) for item in input_dict] + return input_dict + + def has_instance_in_list(obj_list: list, class_type): """Check if a list contains an instance of a given type.""" if obj_list is not None: diff --git a/flow360/component/simulation/unit_system.py b/flow360/component/simulation/unit_system.py index 719948e7d..8a23fb35f 100644 --- a/flow360/component/simulation/unit_system.py +++ b/flow360/component/simulation/unit_system.py @@ -180,6 +180,24 @@ def _is_unit_validator(value): return value +def _list_of_unyt_quantity_to_unyt_array(value): + """ + Convert list of unyt_quantity (may come from `Expression`) to unyt_array + Only handles situation where all components share exact same unit. + We cab relax this to cover more expression results in the future when we decide how to convert. + """ + + if not isinstance(value, list): + return value + if not all(isinstance(item, unyt_quantity) for item in value): + return value + units = {item.units for item in value} + if not len(units) == 1: + return value + shared_unit = units.pop() + return [item.value for item in value] * shared_unit + + # pylint: disable=too-many-return-statements def _unit_inference_validator(value, dim_name, is_array=False, is_matrix=False): """ @@ -491,10 +509,11 @@ def __get_pydantic_json_schema__( return schema - def validate(vec_cls, value, *args, **kwargs): + def validate(vec_cls, value, info, *args, **kwargs): """additional validator for value""" try: value = _unit_object_parser(value, [u.unyt_array, _Flow360BaseUnit.factory]) + value = _list_of_unyt_quantity_to_unyt_array(value) value = _is_unit_validator(value) is_collection = _check_if_input_is_nested_collection(value=value, nest_level=1) @@ -524,7 +543,12 @@ def validate(vec_cls, value, *args, **kwargs): value, vec_cls.type.dim, vec_cls.type.expect_delta_unit ) - if kwargs.get("allow_inf_nan", False) is False: + allow_inf_nan = kwargs.get("allow_inf_nan", False) + + if info.context and "allow_inf_nan" in info.context: + allow_inf_nan = info.context.get("allow_inf_nan", False) + + if allow_inf_nan is False: value = _nan_inf_vector_validator(value) value = _has_dimensions_validator( @@ -539,9 +563,10 @@ def validate(vec_cls, value, *args, **kwargs): raise pd.ValidationError.from_exception_data("validation error", [details]) def __get_pydantic_core_schema__(vec_cls, *args, **kwargs) -> pd.CoreSchema: - return core_schema.no_info_plain_validator_function( - lambda *val_args: validate(vec_cls, *val_args) - ) + def validate_with_info(value, info): + return validate(vec_cls, value, info, *args, **kwargs) + + return core_schema.with_info_plain_validator_function(validate_with_info) cls_obj = type("_VectorType", (), {}) cls_obj.type = dim_type @@ -1596,7 +1621,16 @@ def defaults(self): def __getitem__(self, item): """to support [] access""" - return getattr(self, item) + try: + return getattr(self, item) + except TypeError: + # Allowing usage like [(mass)/(time)] + for attr_name, attr in 
vars(self).items(): + if not isinstance(attr, unyt_quantity): + continue + if attr.units.dimensions == item: + return getattr(self, attr_name) + raise AttributeError(f"'{item}' is not a valid attribute of {self.__class__.__name__}. ") def system_repr(self): """(mass, length, time, temperature) string representation of the system""" diff --git a/flow360/component/simulation/user_code/__init__.py b/flow360/component/simulation/user_code/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/flow360/component/simulation/user_code/core/__init__.py b/flow360/component/simulation/user_code/core/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/flow360/component/simulation/user_code/core/context.py b/flow360/component/simulation/user_code/core/context.py new file mode 100644 index 000000000..49041b486 --- /dev/null +++ b/flow360/component/simulation/user_code/core/context.py @@ -0,0 +1,158 @@ +"""Context handler module""" + +from typing import Any + +from unyt import Unit, unit_symbols, unyt_array + +from flow360.component.simulation.blueprint.core import EvaluationContext +from flow360.component.simulation.blueprint.core.resolver import CallableResolver + + +def _unit_list(): + """Import a list of available unit symbols from the unyt module""" + + symbols = set() + + for _, value in unit_symbols.__dict__.items(): + if isinstance(value, (unyt_array, Unit)): + symbols.add(str(value)) + + return list(symbols) + + +def _import_units(_) -> Any: + """Import and return allowed unit callables""" + # pylint:disable=import-outside-toplevel + from flow360.component.simulation import units as u + + return u + + +def _import_math(_) -> Any: + """Import and return allowed function callables""" + # pylint:disable=import-outside-toplevel, cyclic-import + from flow360.component.simulation.user_code.functions import math + + return math + + +def _import_control(_) -> Any: + """Import and return allowed control variable callables""" + # pylint:disable=import-outside-toplevel, cyclic-import + from flow360.component.simulation.user_code.variables import control + + return control + + +def _import_solution(_) -> Any: + """Import and return allowed solution variable callables""" + # pylint:disable=import-outside-toplevel, cyclic-import + from flow360.component.simulation.user_code.variables import solution + + return solution + + +WHITELISTED_CALLABLES = { + "flow360_math": {"prefix": "fn.", "callables": ["cross"], "evaluate": True}, + "flow360.units": {"prefix": "u.", "callables": _unit_list(), "evaluate": True}, + "flow360.control": { + "prefix": "control.", + "callables": [ + "MachRef", + "Tref", + "t", + "physicalStep", + "pseudoStep", + "timeStepSize", + "alphaAngle", + "betaAngle", + "pressureFreestream", + "momentLengthX", + "momentLengthY", + "momentLengthZ", + "momentCenterX", + "momentCenterY", + "momentCenterZ", + "theta", + "omega", + "omegaDot", + ], + "evaluate": False, + }, + "flow360.solution": { + "prefix": "solution.", + "callables": [ + # pylint: disable=fixme + # TODO: Auto-populate this list from the solution module + "coordinate", + "Cp", + "Cpt", + "grad_density", + "grad_u", + "grad_v", + "grad_w", + "grad_pressure", + "Mach", + "mut", + "mut_ratio", + "nu_hat", + "turbulence_kinetic_energy", + "specific_rate_of_dissipation", + "amplification_factor", + "turbulence_intermittency", + "density", + "velocity", + "pressure", + "qcriterion", + "entropy", + "temperature", + "vorticity", + "wall_distance", + "CfVec", + "Cf", + "heatflux", + "node_normals", + 
"node_forces_per_unit_area", + "y_plus", + "wall_shear_stress_magnitude", + "heat_transfer_coefficient_static_temperature", + "heat_transfer_coefficient_total_temperature", + ], + "evaluate": False, + }, +} + +# Define allowed modules +ALLOWED_MODULES = {"u", "fl", "control", "solution", "math"} + +ALLOWED_CALLABLES = { + **{ + f"{group['prefix']}{callable}": None + for group in WHITELISTED_CALLABLES.values() + for callable in group["callables"] + }, +} + +EVALUATION_BLACKLIST = { + **{ + f"{group['prefix']}{callable}": None + for group in WHITELISTED_CALLABLES.values() + for callable in group["callables"] + if not group["evaluate"] + }, +} + +# Note: Keys of IMPORT_FUNCTIONS needs to be consistent with ALLOWED_MODULES +IMPORT_FUNCTIONS = { + "u": _import_units, + "math": _import_math, + "control": _import_control, + "solution": _import_solution, +} + +default_context = EvaluationContext( + CallableResolver(ALLOWED_CALLABLES, ALLOWED_MODULES, IMPORT_FUNCTIONS, EVALUATION_BLACKLIST) +) + +user_variables: set[str] = set() +solver_variable_name_map: dict[str, str] = {} diff --git a/flow360/component/simulation/user_code/core/types.py b/flow360/component/simulation/user_code/core/types.py new file mode 100644 index 000000000..cc1e7e885 --- /dev/null +++ b/flow360/component/simulation/user_code/core/types.py @@ -0,0 +1,898 @@ +"""This module allows users to write serializable, evaluable symbolic code for use in simulation params""" + +from __future__ import annotations + +import ast +import re +from numbers import Number +from typing import Annotated, Any, Generic, List, Literal, Optional, TypeVar, Union + +import numpy as np +import pydantic as pd +import unyt as u +from pydantic import BeforeValidator, Discriminator, PlainSerializer, Tag +from pydantic_core import InitErrorDetails, core_schema +from typing_extensions import Self +from unyt import Unit, unyt_array, unyt_quantity + +from flow360.component.simulation.blueprint import Evaluable, expr_to_model +from flow360.component.simulation.blueprint.core import EvaluationContext, expr_to_code +from flow360.component.simulation.blueprint.core.types import TargetSyntax +from flow360.component.simulation.framework.base_model import Flow360BaseModel +from flow360.component.simulation.user_code.core.context import default_context +from flow360.component.simulation.user_code.core.utils import ( + handle_syntax_error, + is_number_string, + split_keep_delimiters, +) + +_solver_variables: set[str] = set() + + +class VariableContextInfo(Flow360BaseModel): + """Variable context info for project variables.""" + + name: str + value: ValueOrExpression[AnyNumericType] + postProcessing: bool = pd.Field() + + +def save_user_variables(params): + """ + Save user variables to the project variables. + Declared here since I do not want to import default_context everywhere. + """ + # Get all output variables: + post_processing_variables = set() + for item in params.outputs if params.outputs else []: + if not "output_fields" in item.__class__.model_fields: + continue + for item in item.output_fields.items: + if isinstance(item, UserVariable): + post_processing_variables.add(item.name) + + params.private_attribute_asset_cache.project_variables = [ + VariableContextInfo( + name=name, value=value, postProcessing=name in post_processing_variables + ) + for name, value in default_context._values.items() # pylint: disable=protected-access + if "." 
not in name # Skipping scoped variables (non-user variables) + ] + return params + + +def update_global_context(value: List[VariableContextInfo]): + """Once the project variables are validated, update the global context.""" + + for item in value: + default_context.set(item.name, item.value) + return value + + +def __soft_fail_add__(self, other): + if not isinstance(other, Expression) and not isinstance(other, Variable): + return np.ndarray.__add__(self, other) + return NotImplemented + + +def __soft_fail_sub__(self, other): + if not isinstance(other, Expression) and not isinstance(other, Variable): + return np.ndarray.__sub__(self, other) + return NotImplemented + + +def __soft_fail_mul__(self, other): + if not isinstance(other, Expression) and not isinstance(other, Variable): + return np.ndarray.__mul__(self, other) + return NotImplemented + + +def __soft_fail_truediv__(self, other): + if not isinstance(other, Expression) and not isinstance(other, Variable): + return np.ndarray.__truediv__(self, other) + return NotImplemented + + +unyt_array.__add__ = __soft_fail_add__ +unyt_array.__sub__ = __soft_fail_sub__ +unyt_array.__mul__ = __soft_fail_mul__ +unyt_array.__truediv__ = __soft_fail_truediv__ + + +def _convert_numeric(value): + arg = None + unit_delimiters = ["+", "-", "*", "/", "(", ")"] + if isinstance(value, Number): + arg = str(value) + elif isinstance(value, Unit): + unit = str(value) + tokens = split_keep_delimiters(unit, unit_delimiters) + arg = "" + for token in tokens: + if token not in unit_delimiters and not is_number_string(token): + token = f"u.{token}" + arg += token + else: + arg += token + elif isinstance(value, unyt_array): + unit = str(value.units) + tokens = split_keep_delimiters(unit, unit_delimiters) + arg = f"{_convert_argument(value.value.tolist())[0]} * " + for token in tokens: + if token not in unit_delimiters and not is_number_string(token): + token = f"u.{token}" + arg += token + else: + arg += token + elif isinstance(value, list): + arg = f"[{','.join([_convert_argument(item)[0] for item in value])}]" + return arg + + +def _convert_argument(value): + parenthesize = False + arg = _convert_numeric(value) + if isinstance(value, Expression): + arg = value.expression + parenthesize = True + elif isinstance(value, Variable): + arg = value.name + if not arg: + raise ValueError(f"Incompatible argument of type {type(value)}") + return arg, parenthesize + + +class SerializedValueOrExpression(Flow360BaseModel): + """Serialized frontend-compatible format of an arbitrary value/expression field""" + + type_name: Literal["number", "expression"] = pd.Field() + value: Optional[Union[Number, list[Number]]] = pd.Field(None) + units: Optional[str] = pd.Field(None) + expression: Optional[str] = pd.Field(None) + evaluated_value: Union[Optional[Number], list[Optional[Number]]] = pd.Field(None) + evaluated_units: Optional[str] = pd.Field(None) + output_units: Optional[str] = pd.Field(None, description="See definition in `Expression`.") + + +# This is a wrapper to allow using unyt arrays with pydantic models +class UnytArray(unyt_array): + """UnytArray wrapper to enable pydantic compatibility""" + + def __repr__(self): + return f"UnytArray({str(self)})" + + # pylint: disable=unused-argument + @classmethod + def __get_pydantic_core_schema__(cls, source_type, handler): + return core_schema.no_info_plain_validator_function(cls.validate) + + @classmethod + def validate(cls, value: Any): + """Minimal validator for pydantic compatibility""" + if isinstance(value, unyt_array): + return 
value + raise ValueError(f"Cannot convert {type(value)} to UnytArray") + + +AnyNumericType = Union[float, UnytArray, list] + + +def check_vector_arithmetic(func): + """Decorator to check if vector arithmetic is being attempted and raise an error if so.""" + + def wrapper(self, other): + def is_array(item): + if isinstance(item, unyt_array) and item.shape != (): + return True + if isinstance(item, list): + return True + return False + + if is_array(self.value) or is_array(other): + raise ValueError( + f"Vector operation ({func.__name__} between {self.name} and {other}) not " + "supported for variables. Please write expression for each component." + ) + return func(self, other) + + return wrapper + + +def _check_cyclic_dependencies(*, variable_name: str) -> None: + visited = set() + stack = [(variable_name, [variable_name])] + while stack: + (current_name, current_path) = stack.pop() + current_value = default_context.get(current_name) + if isinstance(current_value, Expression): + used_names = current_value.user_variable_names() + if [name for name in used_names if name in current_path]: + path_string = " -> ".join(current_path + [current_path[0]]) + details = InitErrorDetails( + type="value_error", + ctx={"error": f"Cyclic dependency between variables {path_string}"}, + ) + raise pd.ValidationError.from_exception_data("Variable value error", [details]) + stack.extend( + [(name, current_path + [name]) for name in used_names if name not in visited] + ) + + +class Variable(Flow360BaseModel): + """Base class representing a symbolic variable""" + + name: str = pd.Field(frozen=True) + + model_config = pd.ConfigDict(validate_assignment=True, extra="allow") + + @property + def value(self): + """ + Get the value of the variable from the global context. + """ + return default_context.get(self.name) + + @value.setter + def value(self, value): + """ + Set the value of the variable in the global context. + In parallel to `set_value` this supports syntax like `my_user_var.value = 10.0`. + """ + new_value = pd.TypeAdapter(ValueOrExpression[AnyNumericType]).validate_python(value) + # Not checking overwrite here since it is user controlled explicit assignment operation + default_context.set(self.name, new_value) + _check_cyclic_dependencies(variable_name=self.name) + + @pd.model_validator(mode="before") + @classmethod + def set_value(cls, values): + """ + Supporting syntax like `a = fl.Variable(name="a", value=1)`. + """ + if "name" not in values: + raise ValueError("`name` is required for variable declaration.") + + if "value" in values: + new_value = pd.TypeAdapter(ValueOrExpression[AnyNumericType]).validate_python( + values.pop("value") + ) + # Check overwriting, skip for solver variables: + if values["name"] in default_context.user_variable_names: + diff = new_value != default_context.get(values["name"]) + + if isinstance(diff, np.ndarray): + diff = diff.any() + + if isinstance(diff, list): + # Might not end up here but just in case + diff = any(diff) + + if diff: + raise ValueError( + f"Redeclaring user variable {values['name']} with new value: {new_value}. 
" + f"Previous value: {default_context.get(values['name'])}" + ) + # Call the setter + default_context.set( + values["name"], + new_value, + ) + _check_cyclic_dependencies(variable_name=values["name"]) + + return values + + @check_vector_arithmetic + def __add__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} + {str_arg}") + + @check_vector_arithmetic + def __sub__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} - {str_arg}") + + @check_vector_arithmetic + def __mul__(self, other): + if isinstance(other, Number) and other == 0: + return Expression(expression="0") + + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} * {str_arg}") + + @check_vector_arithmetic + def __truediv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} / {str_arg}") + + @check_vector_arithmetic + def __floordiv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} // {str_arg}") + + @check_vector_arithmetic + def __mod__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} % {str_arg}") + + @check_vector_arithmetic + def __pow__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.name} ** {str_arg}") + + def __neg__(self): + return Expression(expression=f"-{self.name}") + + def __pos__(self): + return Expression(expression=f"+{self.name}") + + def __abs__(self): + return Expression(expression=f"abs({self.name})") + + @check_vector_arithmetic + def __radd__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} + {self.name}") + + @check_vector_arithmetic + def __rsub__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} - {self.name}") + + @check_vector_arithmetic + def __rmul__(self, other): + if isinstance(other, Number) and other == 0: + return Expression(expression="0") + + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} * {self.name}") + + @check_vector_arithmetic + def __rtruediv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} / {self.name}") + + @check_vector_arithmetic + def __rfloordiv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} // {self.name}") + + @check_vector_arithmetic + def __rmod__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} % {self.name}") + + @check_vector_arithmetic + def __rpow__(self, other): + (arg, parenthesize) = 
_convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} ** {self.name}") + + def __getitem__(self, item): + (arg, _) = _convert_argument(item) + return Expression(expression=f"{self.name}[{arg}]") + + def __str__(self): + # pylint:disable=invalid-str-returned + return self.name + + def __repr__(self): + return f"Variable({self.name} = {self.value})" + + def __hash__(self): + return hash(self.name) + + def __eq__(self, other): + # NaN-compatible equal operator for unit test support + if not isinstance(other, Variable): + return False + return self.model_dump_json() == other.model_dump_json() + + +class UserVariable(Variable): + """Class representing a user-defined symbolic variable""" + + name: str = pd.Field(frozen=True) + + type_name: Literal["UserVariable"] = pd.Field("UserVariable", frozen=True) + + @pd.field_validator("name", mode="after") + @classmethod + def check_unscoped_name(cls, v): + """Ensure that the variable name is not scoped. Only solver side variables can be scoped.""" + if "." in v: + raise ValueError( + "User variable name cannot contain dots (scoped variables not supported)." + ) + return v + + def __hash__(self): + """ + Support for set and deduplicate. + """ + return hash(self.model_dump_json()) + + def in_unit(self, new_unit: Union[str, Unit] = None): + """Requesting the output of the variable to be in the given (new_unit) units.""" + if isinstance(new_unit, Unit): + new_unit = str(new_unit) + self.value.output_units = new_unit + return self + + +class SolverVariable(Variable): + """Class representing a pre-defined symbolic variable that cannot be evaluated at client runtime""" + + type_name: Literal["SolverVariable"] = pd.Field("SolverVariable", frozen=True) + solver_name: Optional[str] = pd.Field(None) + variable_type: Literal["Volume", "Surface", "Scalar"] = pd.Field() + + @pd.model_validator(mode="after") + def update_context(self): + """Auto updating context when new variable is declared""" + default_context.set(self.name, self.value, Variable) + _solver_variables.add(self.name) + if self.solver_name: + default_context.set_alias(self.name, self.solver_name) + return self + + def in_unit(self, new_name: str, new_unit: Union[str, Unit] = None): + """ + Return a UserVariable that will generate results in the new_unit. + If new_unit is not specified then the unit will be determined by the unit system. + """ + if isinstance(new_unit, Unit): + new_unit = str(new_unit) + new_variable = UserVariable( + name=new_name, + value=Expression(expression=self.name), + ) + new_variable.value.output_units = new_unit # pylint:disable=assigning-non-slot + return new_variable + + +class Expression(Flow360BaseModel, Evaluable): + """ + A symbolic, validated representation of a mathematical expression. + + This model wraps a string-based expression, ensures its syntax and semantics + against the global evaluation context, and provides methods to: + - evaluate its numeric/unyt result (`evaluate`) + - list user-defined variables it references (`user_variables` / `user_variable_names`) + - emit C++ solver code (`to_solver_code`) + """ + + expression: str = pd.Field("") + output_units: Optional[str] = pd.Field( + None, + description="String representation of what the requested units the evaluated expression should be " + "when `self` is used as an output field. 
By default the output units will be inferred from the unit " + "system associated with SimulationParams", + ) + + model_config = pd.ConfigDict(validate_assignment=True) + + @pd.model_validator(mode="before") + @classmethod + def _validate_expression(cls, value) -> Self: + output_units = None + if isinstance(value, str): + expression = value + elif isinstance(value, dict) and "expression" in value.keys(): + expression = value["expression"] + output_units = value.get("output_units") + elif isinstance(value, Expression): + expression = str(value) + output_units = value.output_units + elif isinstance(value, Variable): + expression = str(value) + if isinstance(value.value, Expression): + output_units = value.value.output_units + elif isinstance(value, list): + expression = f"[{','.join([_convert_argument(item)[0] for item in value])}]" + else: + details = InitErrorDetails( + type="value_error", ctx={"error": f"Invalid type {type(value)}"} + ) + raise pd.ValidationError.from_exception_data("Expression type error", [details]) + try: + # To ensure the expression is valid (also checks for + expr_to_model(expression, default_context) + # To reduce unnecessary parentheses + expression = ast.unparse(ast.parse(expression)) + except SyntaxError as s_err: + handle_syntax_error(s_err, expression) + except ValueError as v_err: + details = InitErrorDetails(type="value_error", ctx={"error": v_err}) + raise pd.ValidationError.from_exception_data("Expression value error", [details]) + + return {"expression": expression, "output_units": output_units} + + @pd.field_validator("expression", mode="after") + @classmethod + def remove_leading_and_trailing_whitespace(cls, value: str) -> str: + """Remove leading and trailing whitespace from the expression""" + return value.strip() + + def evaluate( + self, + context: EvaluationContext = None, + raise_on_non_evaluable: bool = True, + force_evaluate: bool = True, + ) -> Union[float, list[float], unyt_array, Expression]: + """Evaluate this expression against the given context.""" + if context is None: + context = default_context + expr = expr_to_model(self.expression, context) + result = expr.evaluate(context, raise_on_non_evaluable, force_evaluate) + + # Sometimes we may yield a list of expressions instead of + # an expression containing a list, so we check this here + # and convert if necessary + + if isinstance(result, list): + is_expression_list = False + + for item in result: + if isinstance(item, Expression): + is_expression_list = True + + if is_expression_list: + result = Expression.model_validate(result) + + return result + + def user_variables(self): + """Get list of user variables used in expression.""" + expr = expr_to_model(self.expression, default_context) + names = expr.used_names() + names = [name for name in names if name in default_context.user_variable_names] + + return [UserVariable(name=name, value=default_context.get(name)) for name in names] + + def user_variable_names(self): + """Get list of user variable names used in expression.""" + expr = expr_to_model(self.expression, default_context) + names = expr.used_names() + names = [name for name in names if name in default_context.user_variable_names] + + return names + + def solver_variable_names(self): + """Get list of solver variable names used in expression.""" + expr = expr_to_model(self.expression, default_context) + names = expr.used_names() + names = [name for name in names if name in _solver_variables] + return names + + def to_solver_code(self, params): + """Convert to solver readable 
code.""" + + def translate_symbol(name): + alias = default_context.get_alias(name) + + if alias: + return alias + + match = re.fullmatch("u\\.(.+)", name) + + if match: + unit_name = match.group(1) + unit = Unit(unit_name) + conversion_factor = params.convert_unit(1.0 * unit, "flow360").v + return str(conversion_factor) + + return name + + partial_result = self.evaluate( + default_context, raise_on_non_evaluable=False, force_evaluate=False + ) + + if isinstance(partial_result, Expression): + expr = expr_to_model(partial_result.expression, default_context) + else: + expr = expr_to_model(_convert_numeric(partial_result), default_context) + + return expr_to_code(expr, TargetSyntax.CPP, translate_symbol) + + def __hash__(self): + return hash(self.expression) + + def __add__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.expression} + {str_arg}") + + def __sub__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{self.expression} - {str_arg}") + + def __mul__(self, other): + if isinstance(other, Number) and other == 0: + return Expression(expression="0") + + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"({self.expression}) * {str_arg}") + + def __truediv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"({self.expression}) / {str_arg}") + + def __floordiv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"({self.expression}) // {str_arg}") + + def __mod__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"({self.expression}) % {str_arg}") + + def __pow__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"({self.expression}) ** {str_arg}") + + def __neg__(self): + return Expression(expression=f"-({self.expression})") + + def __pos__(self): + return Expression(expression=f"+({self.expression})") + + def __abs__(self): + return Expression(expression=f"abs({self.expression})") + + def __radd__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} + {self.expression}") + + def __rsub__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} - {self.expression}") + + def __rmul__(self, other): + if isinstance(other, Number) and other == 0: + return Expression(expression="0") + + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} * ({self.expression})") + + def __rtruediv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} / ({self.expression})") + + def __rfloordiv__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} // 
({self.expression})") + + def __rmod__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} % ({self.expression})") + + def __rpow__(self, other): + (arg, parenthesize) = _convert_argument(other) + str_arg = arg if not parenthesize else f"({arg})" + return Expression(expression=f"{str_arg} ** ({self.expression})") + + def __getitem__(self, index): + (arg, _) = _convert_argument(index) + tree = ast.parse(self.expression, mode="eval") + int_arg = None + try: + int_arg = int(arg) + except ValueError: + pass + if isinstance(tree.body, ast.List) and int_arg is not None: + # Expression string with list syntax, like "[aa,bb,cc]" + # and since the index is static we can reduce it + result = [ast.unparse(elt) for elt in tree.body.elts] + return Expression.model_validate(result[int_arg]) + return Expression(expression=f"({self.expression})[{arg}]") + + def __str__(self): + # pylint:disable=invalid-str-returned + return self.expression + + def __repr__(self): + return f"Expression({self.expression})" + + def __eq__(self, other): + if isinstance(other, Expression): + return self.expression == other.expression + return super().__eq__(other) + + @property + def dimensionality(self): + """The physical dimensionality of the expression.""" + value = self.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + assert isinstance(value, (unyt_array, unyt_quantity)) + return value.units.dimensions + + @property + def length(self): + """The number of elements in the expression.""" + value = self.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + assert isinstance( + value, (unyt_array, unyt_quantity, list, Number) + ), f"Unexpected evaluated result type: {type(value)}" + if isinstance(value, list): + return len(value) + return 1 if isinstance(value, (unyt_quantity, Number)) else value.shape[0] + + def get_output_units(self, input_params=None): + """ + Get the output units of the expression. + + - If self.output_units is None, derive the default output unit based on the + value's dimensionality and current unit system. + + - If self.output_units is valid u.Unit string, deserialize it and return it. + + - If self.output_units is valid unit system name, derive the default output + unit based on the value's dimensionality and the **given** unit system. + + - If expression is a number constant, return None. + + - Else raise ValueError. 
+ """ + + def get_unit_from_unit_system(expression: Expression, unit_system_name: str): + """Derive the default output unit based on the value's dimensionality and current unit system""" + numerical_value = expression.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + if isinstance(numerical_value, list): + numerical_value = numerical_value[0] + if not isinstance(numerical_value, (u.unyt_array, u.unyt_quantity)): + # Pure dimensionless constant + return None + + if unit_system_name in ("SI", "SI_unit_system"): + return numerical_value.in_base("mks").units + if unit_system_name in ("Imperial", "Imperial_unit_system"): + return numerical_value.in_base("imperial").units + if unit_system_name in ("CGS", "CGS_unit_system"): + return numerical_value.in_base("cgs").units + raise ValueError(f"[Internal] Invalid unit system: {unit_system_name}") + + try: + return u.Unit(self.output_units) + except u.exceptions.UnitParseError as e: + if input_params is None: + raise ValueError( + "[Internal] input_params required when output_units is not valid u.Unit string" + ) from e + if not self.output_units: + unit_system_name: Literal["SI", "Imperial", "CGS"] = input_params.unit_system.name + else: + unit_system_name = self.output_units + return get_unit_from_unit_system(self, unit_system_name) + + +T = TypeVar("T") + + +class ValueOrExpression(Expression, Generic[T]): + """Model accepting both value and expressions""" + + def __class_getitem__(cls, typevar_values): # pylint:disable=too-many-statements + def _internal_validator(value: Expression): + try: + result = value.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + except Exception as err: + raise ValueError(f"expression evaluation failed: {err}") from err + pd.TypeAdapter(typevar_values).validate_python(result, context={"allow_inf_nan": True}) + return value + + expr_type = Annotated[Expression, pd.AfterValidator(_internal_validator)] + + def _deserialize(value) -> Self: + try: + value = SerializedValueOrExpression.model_validate(value) + if value.type_name == "number": + if value.units is not None: + # unyt objects + return unyt_array(value.value, value.units) + return value.value + if value.type_name == "expression": + return expr_type(expression=value.expression, output_units=value.output_units) + except Exception: # pylint:disable=broad-exception-caught + pass + + return value + + def _serializer(value, info) -> dict: + if isinstance(value, Expression): + serialized = SerializedValueOrExpression( + type_name="expression", + output_units=value.output_units, + ) + + serialized.expression = value.expression + + evaluated = value.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + + if isinstance(evaluated, list): + # May result from Expression which is actually a list of expressions + try: + evaluated = u.unyt_array(evaluated) + except u.exceptions.IterableUnitCoercionError: + # Inconsistent units for components of list + pass + + if isinstance(evaluated, Number): + serialized.evaluated_value = ( + evaluated if not np.isnan(evaluated) else None # NaN-None handling + ) + elif isinstance(evaluated, unyt_array): + if evaluated.size == 1: + serialized.evaluated_value = ( + float(evaluated.value) + if not np.isnan(evaluated.value) + else None # NaN-None handling + ) + else: + serialized.evaluated_value = tuple( + item if not np.isnan(item) else None + for item in evaluated.value.tolist() + ) + + serialized.evaluated_units = str(evaluated.units.expr) + else: + serialized = SerializedValueOrExpression(type_name="number") + # 
Note: NaN handling should be unnecessary since it would + # have end up being expression first so not reaching here. + if isinstance(value, (Number, List)): + serialized.value = value + elif isinstance(value, unyt_array): + if value.size == 1: + serialized.value = float(value.value) + else: + serialized.value = tuple(value.value.tolist()) + + serialized.units = str(value.units.expr) + + return serialized.model_dump(**info.__dict__) + + def _discriminator(v: Any) -> str: + # Note: This is ran after deserializer + if isinstance(v, SerializedValueOrExpression): + return v.type_name + if isinstance(v, dict): + return v.get("typeName") if v.get("typeName") else v.get("type_name") + if isinstance(v, (Expression, Variable, str)): + return "expression" + if isinstance(v, list) and all(isinstance(item, Expression) for item in v): + return "expression" + if isinstance(v, (Number, unyt_array, list)): + return "number" + raise KeyError("Unknown expression input type: ", v, v.__class__.__name__) + + union_type = Annotated[ + Union[ + Annotated[expr_type, Tag("expression")], Annotated[typevar_values, Tag("number")] + ], + pd.Field(discriminator=Discriminator(_discriminator)), + BeforeValidator(_deserialize), + PlainSerializer(_serializer), + ] + return union_type diff --git a/flow360/component/simulation/user_code/core/utils.py b/flow360/component/simulation/user_code/core/utils.py new file mode 100644 index 000000000..a287afae6 --- /dev/null +++ b/flow360/component/simulation/user_code/core/utils.py @@ -0,0 +1,47 @@ +"""Utility functions for the user code module""" + +import re + +import pydantic as pd +from pydantic_core import InitErrorDetails + + +def is_number_string(s: str) -> bool: + """Check if the string represents a single scalar number""" + try: + float(s) + return True + except ValueError: + return False + + +def split_keep_delimiters(value: str, delimiters: list) -> list: + """split string but keep the delimiters""" + escaped_delimiters = [re.escape(d) for d in delimiters] + pattern = f"({'|'.join(escaped_delimiters)})" + result = re.split(pattern, value) + return [part for part in result if part != ""] + + +def handle_syntax_error(se: SyntaxError, source: str): + """Handle expression syntax error.""" + caret = " " * (se.offset - 1) + "^" if se.text and se.offset else None + msg = f"{se.msg} at line {se.lineno}, column {se.offset}" + if caret: + msg += f"\n{se.text.rstrip()}\n{caret}" + + raise pd.ValidationError.from_exception_data( + "expression_syntax", + [ + InitErrorDetails( + type="value_error", + msg=se.msg, + input=source, + ctx={ + "line": se.lineno, + "column": se.offset, + "error": msg, + }, + ) + ], + ) diff --git a/flow360/component/simulation/user_code/functions/__init__.py b/flow360/component/simulation/user_code/functions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/flow360/component/simulation/user_code/functions/math.py b/flow360/component/simulation/user_code/functions/math.py new file mode 100644 index 000000000..3eedae4ea --- /dev/null +++ b/flow360/component/simulation/user_code/functions/math.py @@ -0,0 +1,43 @@ +""" +Math.h for Flow360 Expression system +""" + +from typing import Any, Union + +import numpy as np +from unyt import unyt_array, unyt_quantity + +from flow360.component.simulation.user_code.core.types import Expression + + +def _handle_expression_list(value: list[Any]): + is_expression_list = False + + for item in value: + if isinstance(item, Expression): + is_expression_list = True + + if is_expression_list: + value = 
Expression.model_validate(value) + + return value + + +VectorInputType = Union[list[float], unyt_array, Expression] +ScalarInputType = Union[float, unyt_quantity, Expression] + + +def cross(left: VectorInputType, right: VectorInputType): + """Customized Cross function to work with the `Expression` and Variables""" + # Taking advantage of unyt as much as possible: + if isinstance(left, unyt_array) and isinstance(right, unyt_array): + return np.cross(left, right) + + # Otherwise + result = [ + left[1] * right[2] - left[2] * right[1], + left[2] * right[0] - left[0] * right[2], + left[0] * right[1] - left[1] * right[0], + ] + + return _handle_expression_list(result) diff --git a/flow360/component/simulation/user_code/variables/__init__.py b/flow360/component/simulation/user_code/variables/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/flow360/component/simulation/user_code/variables/control.py b/flow360/component/simulation/user_code/variables/control.py new file mode 100644 index 000000000..cdafc6ad9 --- /dev/null +++ b/flow360/component/simulation/user_code/variables/control.py @@ -0,0 +1,63 @@ +"""Control variables of Flow360""" + +from flow360.component.simulation import units as u +from flow360.component.simulation.user_code.core.types import SolverVariable + +# pylint:disable=no-member +MachRef = SolverVariable( + name="control.MachRef", + value=float("NaN"), + solver_name="machRef", + variable_type="Scalar", +) # Reference mach specified by the user +Tref = SolverVariable( + name="control.Tref", value=float("NaN") * u.K, variable_type="Scalar" +) # Temperature specified by the user +t = SolverVariable( + name="control.t", value=float("NaN") * u.s, variable_type="Scalar" +) # Physical time +physicalStep = SolverVariable( + name="control.physicalStep", value=float("NaN"), variable_type="Scalar" +) # Physical time step, starting from 0 +pseudoStep = SolverVariable( + name="control.pseudoStep", value=float("NaN"), variable_type="Scalar" +) # Pseudo time step within physical time step +timeStepSize = SolverVariable( + name="control.timeStepSize", value=float("NaN") * u.s, variable_type="Scalar" +) # Physical time step size +alphaAngle = SolverVariable( + name="control.alphaAngle", value=float("NaN") * u.rad, variable_type="Scalar" +) # Alpha angle specified in freestream +betaAngle = SolverVariable( + name="control.betaAngle", value=float("NaN") * u.rad, variable_type="Scalar" +) # Beta angle specified in freestream +pressureFreestream = SolverVariable( + name="control.pressureFreestream", value=float("NaN") * u.Pa, variable_type="Scalar" +) # Freestream reference pressure (1.0/1.4) +momentLengthX = SolverVariable( + name="control.momentLengthX", value=float("NaN") * u.m, variable_type="Scalar" +) # X component of momentLength +momentLengthY = SolverVariable( + name="control.momentLengthY", value=float("NaN") * u.m, variable_type="Scalar" +) # Y component of momentLength +momentLengthZ = SolverVariable( + name="control.momentLengthZ", value=float("NaN") * u.m, variable_type="Scalar" +) # Z component of momentLength +momentCenterX = SolverVariable( + name="control.momentCenterX", value=float("NaN") * u.m, variable_type="Scalar" +) # X component of momentCenter +momentCenterY = SolverVariable( + name="control.momentCenterY", value=float("NaN") * u.m, variable_type="Scalar" +) # Y component of momentCenter +momentCenterZ = SolverVariable( + name="control.momentCenterZ", value=float("NaN") * u.m, variable_type="Scalar" +) # Z component of momentCenter +theta = 
SolverVariable( + name="control.theta", value=float("NaN") * u.rad, variable_type="Scalar" +) # Rotation angle of volume zone +omega = SolverVariable( + name="control.omega", value=float("NaN") * u.rad, variable_type="Scalar" +) # Rotation speed of volume zone +omegaDot = SolverVariable( + name="control.omegaDot", value=float("NaN") * u.rad / u.s, variable_type="Scalar" +) # Rotation acceleration of volume zone diff --git a/flow360/component/simulation/user_code/variables/solution.py b/flow360/component/simulation/user_code/variables/solution.py new file mode 100644 index 000000000..9ca910689 --- /dev/null +++ b/flow360/component/simulation/user_code/variables/solution.py @@ -0,0 +1,335 @@ +"""Solution variables of Flow360""" + +import unyt as u + +from flow360.component.simulation.user_code.core.types import SolverVariable + +# pylint:disable = fixme +# TODO:Scalar type (needs further discussion on how to handle scalar values) +# bet_thrust = SolverVariable( +# name="solution.bet_thrust", value=float("NaN") +# ) # Thrust force for BET disk +# bet_torque = SolverVariable(name="solution.bet_torque", value=float("NaN")) # Torque for BET disk +# bet_omega = SolverVariable( +# name="solution.bet_omega", value=float("NaN") +# ) # Rotation speed for BET disk +# CD = SolverVariable(name="solution.CD", value=float("NaN")) # Drag coefficient on patch +# CL = SolverVariable(name="solution.CL", value=float("NaN")) # Lift coefficient on patch +# forceX = SolverVariable(name="solution.forceX", value=float("NaN")) # Total force in X direction +# forceY = SolverVariable(name="solution.forceY", value=float("NaN")) # Total force in Y direction +# forceZ = SolverVariable(name="solution.forceZ", value=float("NaN")) # Total force in Z direction +# momentX = SolverVariable(name="solution.momentX", value=float("NaN")) # Total moment in X direction +# momentY = SolverVariable(name="solution.momentY", value=float("NaN")) # Total moment in Y direction +# momentZ = SolverVariable(name="solution.momentZ", value=float("NaN")) # Total moment in Z direction + + +# pylint:disable=no-member +# Common +coordinate = SolverVariable( + name="solution.coordinate", + value=[float("NaN"), float("NaN"), float("NaN")] * u.m, + solver_name="coordinate", + variable_type="Volume", +) # Grid coordinates + +Cp = SolverVariable( + name="solution.Cp", + value=float("NaN"), + solver_name="___Cp", + variable_type="Volume", +) +Cpt = SolverVariable( + name="solution.Cpt", + value=float("NaN"), + solver_name="___Cpt", + variable_type="Volume", +) +grad_density = SolverVariable( + name="solution.grad_density", + value=[float("NaN"), float("NaN"), float("NaN")] * u.kg / u.m**4, + solver_name="___grad_density", + variable_type="Volume", +) +grad_u = SolverVariable( + name="solution.grad_u", + value=[float("NaN"), float("NaN"), float("NaN")] / u.s, + solver_name="___grad_u", + variable_type="Volume", +) +grad_v = SolverVariable( + name="solution.grad_v", + value=[float("NaN"), float("NaN"), float("NaN")] / u.s, + solver_name="___grad_v", + variable_type="Volume", +) +grad_w = SolverVariable( + name="solution.grad_w", + value=[float("NaN"), float("NaN"), float("NaN")] / u.s, + solver_name="___grad_w", + variable_type="Volume", +) +grad_pressure = SolverVariable( + name="solution.grad_pressure", + value=[float("NaN"), float("NaN"), float("NaN")] * u.Pa / u.m, + solver_name="___grad_pressure", + variable_type="Volume", +) + +Mach = SolverVariable( + name="solution.Mach", + value=float("NaN"), + solver_name="___Mach", + variable_type="Volume", +) 
+mut = SolverVariable( + name="solution.mut", + value=float("NaN") * u.kg / u.m / u.s, + solver_name="mut", + variable_type="Volume", +) # Turbulent viscosity +mut_ratio = SolverVariable( + name="solution.mut_ratio", + value=float("NaN"), + solver_name="___mut_ratio", + variable_type="Volume", +) +nu_hat = SolverVariable( + name="solution.nu_hat", + value=float("NaN") * u.m**2 / u.s, + solver_name="___nu_hat", + variable_type="Volume", +) +turbulence_kinetic_energy = SolverVariable( + name="solution.turbulence_kinetic_energy", + value=float("NaN") * u.J / u.kg, + solver_name="___turbulence_kinetic_energy", + variable_type="Volume", +) # k +specific_rate_of_dissipation = SolverVariable( + name="solution.specific_rate_of_dissipation", + value=float("NaN") / u.s, + solver_name="___specific_rate_of_dissipation", + variable_type="Volume", +) # Omega +amplification_factor = SolverVariable( + name="solution.amplification_factor", + value=float("NaN"), + solver_name="solutionTransition[0]", + variable_type="Volume", +) # transition model variable: n, non-dimensional +turbulence_intermittency = SolverVariable( + name="solution.turbulence_intermittency", + value=float("NaN"), + solver_name="solutionTransition[1]", + variable_type="Volume", +) # transition model variable: gamma, non-dimensional + + +density = SolverVariable( + name="solution.density", + value=float("NaN") * u.kg / u.m**3, + solver_name="primitiveVars[0]", + variable_type="Volume", +) +velocity = SolverVariable( + name="solution.velocity", + value=[float("NaN"), float("NaN"), float("NaN")] * u.m / u.s, + solver_name="___velocity", + variable_type="Volume", +) +pressure = SolverVariable( + name="solution.pressure", + value=float("NaN") * u.Pa, + solver_name="primitiveVars[4]", + variable_type="Volume", +) + +qcriterion = SolverVariable( + name="solution.qcriterion", + value=float("NaN") / u.s**2, + solver_name="___qcriterion", + variable_type="Volume", +) +entropy = SolverVariable( + name="solution.entropy", + value=float("NaN") * u.J / u.K, + solver_name="___entropy", + variable_type="Volume", +) +temperature = SolverVariable( + name="solution.temperature", + value=float("NaN") * u.K, + solver_name="___temperature", + variable_type="Volume", +) +vorticity = SolverVariable( + name="solution.vorticity", + value=[float("NaN"), float("NaN"), float("NaN")] / u.s, + solver_name="___vorticity", + variable_type="Volume", +) +wall_distance = SolverVariable( + name="solution.wall_distance", + value=float("NaN") * u.m, + solver_name="wallDistance", + variable_type="Volume", +) + +# Surface +CfVec = SolverVariable( + name="solution.CfVec", + value=[float("NaN"), float("NaN"), float("NaN")], + solver_name="___CfVec", + variable_type="Surface", +) +Cf = SolverVariable( + name="solution.Cf", + value=float("NaN"), + solver_name="___Cf", + variable_type="Surface", +) +heatflux = SolverVariable( + name="solution.heatflux", + value=float("NaN") * u.W / u.m**2, + solver_name="heatFlux", + variable_type="Surface", +) +node_normals = SolverVariable( + name="solution.node_normals", + value=[float("NaN"), float("NaN"), float("NaN")], + solver_name="nodeNormals", + variable_type="Surface", +) +node_forces_per_unit_area = SolverVariable( + name="solution.node_forces_per_unit_area", + value=[float("NaN"), float("NaN"), float("NaN")] * u.Pa, + solver_name="___node_forces_per_unit_area", + variable_type="Surface", +) +y_plus = SolverVariable( + name="solution.y_plus", value=float("NaN"), solver_name="yPlus", variable_type="Surface" +) +wall_shear_stress_magnitude 
= SolverVariable( + name="solution.wall_shear_stress_magnitude", + value=float("NaN") * u.Pa, + solver_name="___wall_shear_stress_magnitude", + variable_type="Surface", +) +heat_transfer_coefficient_static_temperature = SolverVariable( + name="solution.heat_transfer_coefficient_static_temperature", + value=float("NaN") * u.W / (u.m**2 * u.K), + solver_name="___heat_transfer_coefficient_static_temperature", + variable_type="Surface", +) +heat_transfer_coefficient_total_temperature = SolverVariable( + name="solution.heat_transfer_coefficient_total_temperature", + value=float("NaN") * u.W / (u.m**2 * u.K), + solver_name="___heat_transfer_coefficient_total_temperature", + variable_type="Surface", +) + + +# TODO +# pylint:disable = fixme +# velocity_relative = SolverVariable( +# name="solution.velocity_relative", +# value=[float("NaN"), float("NaN"), float("NaN")] * u.m / u.s, +# solver_name="velocityRelative", +# prepending_code="double velocityRelative[3];for(int i=0;i<3;i++)" +# + "{velocityRelative[i]=velocity[i]-nodeVelocity[i];}", +# variable_type="Volume", +# ) +# wallFunctionMetric = SolverVariable( +# name="solution.wallFunctionMetric", value=float("NaN"), variable_type="Surface" +# ) +# bet_metrics_alpha_degree = SolverVariable( +# name="solution.bet_metrics_alpha_degree", value=float("NaN") * u.deg, variable_type="Volume" +# ) +# bet_metrics_Cf_axial = SolverVariable( +# name="solution.bet_metrics_Cf_axial", value=float("NaN"), variable_type="Volume" +# ) +# bet_metrics_Cf_circumferential = SolverVariable( +# name="solution.bet_metrics_Cf_circumferential", value=float("NaN"), variable_type="Volume" +# ) +# bet_metrics_local_solidity_integral_weight = SolverVariable( +# name="solution.bet_metrics_local_solidity_integral_weight", +# value=float("NaN"), +# variable_type="Volume", +# ) +# bet_metrics_tip_loss_factor = SolverVariable( +# name="solution.bet_metrics_tip_loss_factor", value=float("NaN"), variable_type="Volume" +# ) +# bet_metrics_velocity_relative = SolverVariable( +# name="solution.bet_metrics_velocity_relative", +# value=[float("NaN"), float("NaN"), float("NaN")] * u.m / u.s, +# variable_type="Volume", +# ) +# betMetricsPerDisk = SolverVariable( +# name="solution.betMetricsPerDisk", value=float("NaN"), variable_type="Volume" +# ) + + +# Abandoned (Possible) +# SpalartAllmaras_hybridModel = SolverVariable( +# name="solution.SpalartAllmaras_hybridModel", value=float("NaN"), variable_type="Volume" +# ) +# kOmegaSST_hybridModel = SolverVariable( +# name="solution.kOmegaSST_hybridModel", value=float("NaN"), variable_type="Volume" +# ) +# localCFL = SolverVariable(name="solution.localCFL", value=float("NaN"), variable_type="Volume") +# numericalDissipationFactor = SolverVariable( +# name="solution.numericalDissipationFactor", value=float("NaN"), variable_type="Volume" +# ) +# lowMachPreconditionerSensor = SolverVariable( +# name="solution.lowMachPreconditionerSensor", value=float("NaN"), variable_type="Volume" +# ) + +# Abandoned +# linearResidualNavierStokes = SolverVariable( +# name="solution.linearResidualNavierStokes", value=float("NaN"), variable_type="Volume" +# ) +# linearResidualTurbulence = SolverVariable( +# name="solution.linearResidualTurbulence", value=float("NaN"), variable_type="Volume" +# ) +# linearResidualTransition = SolverVariable( +# name="solution.linearResidualTransition", value=float("NaN"), variable_type="Volume" +# ) +# residualNavierStokes = SolverVariable( +# name="solution.residualNavierStokes", value=float("NaN"), variable_type="Volume" +# ) +# 
residualTransition = SolverVariable( +# name="solution.residualTransition", value=float("NaN"), variable_type="Volume" +# ) +# residualTurbulence = SolverVariable( +# name="solution.residualTurbulence", value=float("NaN"), variable_type="Volume" +# ) +# solutionNavierStokes = SolverVariable( +# name="solution.solutionNavierStokes", value=float("NaN"), variable_type="Volume" +# ) +# solutionTurbulence = SolverVariable( +# name="solution.solutionTurbulence", value=float("NaN"), variable_type="Volume" +# ) +# residualHeatSolver = SolverVariable( +# name="solution.residualHeatSolver", value=float("NaN"), variable_type="Volume" +# ) +# velocity_x = SolverVariable(name="solution.velocity_x", value=float("NaN"), variable_type="Volume") +# velocity_y = SolverVariable(name="solution.velocity_y", value=float("NaN"), variable_type="Volume") +# velocity_z = SolverVariable(name="solution.velocity_z", value=float("NaN"), variable_type="Volume") +# velocity_magnitude = SolverVariable( +# name="solution.velocity_magnitude", value=float("NaN"), variable_type="Volume" +# ) +# vorticityMagnitude = SolverVariable( +# name="solution.vorticityMagnitude", value=float("NaN"), variable_type="Volume" +# ) +# vorticity_x = SolverVariable( +# name="solution.vorticity_x", value=float("NaN"), variable_type="Volume" +# ) +# vorticity_y = SolverVariable( +# name="solution.vorticity_y", value=float("NaN"), variable_type="Volume" +# ) +# vorticity_z = SolverVariable( +# name="solution.vorticity_z", value=float("NaN"), variable_type="Volume" +# ) +# wall_shear_stress_magnitude_pa = SolverVariable( +# name="solution.wall_shear_stress_magnitude_pa", value=float("NaN"), variable_type="Surface" +# ) diff --git a/flow360/component/simulation/validation/validation_output.py b/flow360/component/simulation/validation/validation_output.py index a63bfd534..6a0e60449 100644 --- a/flow360/component/simulation/validation/validation_output.py +++ b/flow360/component/simulation/validation/validation_output.py @@ -52,7 +52,7 @@ def extract_literal_values(annotation): allowed_items = natively_supported + additional_fields for item in output.output_fields.items: - if item not in allowed_items: + if isinstance(item, str) and item not in allowed_items: raise ValueError( f"In `outputs`[{output_index}] {output.output_type}:, {item} is not a" f" valid output field name. Allowed fields are {allowed_items}." @@ -98,7 +98,7 @@ def _check_output_fields_valid_given_turbulence_model(params): if output.output_type in ("AeroAcousticOutput", "StreamlineOutput"): continue for item in output.output_fields.items: - if item in invalid_output_fields[turbulence_model]: + if isinstance(item, str) and item in invalid_output_fields[turbulence_model]: raise ValueError( f"In `outputs`[{output_index}] {output.output_type}:, {item} is not a valid" f" output field when using turbulence model: {turbulence_model}."
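
A minimal usage sketch of the user-code API introduced in user_code/core/types.py above; it is not part of the patch itself. The import paths are the ones shown in this diff, the variable names (chord, x, area) are illustrative, and evaluation of "u.<unit>" tokens inside string expressions is assumed to work as the "x * u.m**2" test fixture further below suggests:

from flow360.component.simulation import units as u
from flow360.component.simulation.user_code.core.types import Expression, UserVariable

# Declaring a UserVariable registers it in the default evaluation context.
chord = UserVariable(name="chord", value=0.5 * u.m)

# Arithmetic on variables builds symbolic Expression objects instead of plain numbers.
double_chord = chord * 2             # Expression(expression="chord * 2")
print(double_chord.evaluate())       # 1.0 m  (a unyt quantity, since "chord" is evaluable)

# Expressions can also be validated from strings that reference known variables and units.
x = UserVariable(name="x", value=1.0)
area = Expression(expression="x * u.m**2")
print(area.evaluate())               # 1.0 m**2
print(area.user_variable_names())    # ['x']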
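
Solver-side variables declared in variables/control.py and variables/solution.py are NaN-valued placeholders on the client, but they can still be combined symbolically. The sketch below is under the same assumptions; fl_math, velocity_kmh, swirl, and mach_scaled are illustrative names, and passing an Expression as a UserVariable value is assumed to be supported:

from flow360.component.simulation.user_code.core.types import UserVariable
from flow360.component.simulation.user_code.functions import math as fl_math
from flow360.component.simulation.user_code.variables import control, solution

# in_unit() wraps a SolverVariable in a UserVariable whose output is requested
# in the given units (a plain unit string understood by unyt).
velocity_kmh = solution.velocity.in_unit(new_name="velocity_kmh", new_unit="km/hr")

# math.cross falls back to a component-wise Expression when its inputs are not
# plain unyt arrays, so it composes with solver variables.
swirl = UserVariable(name="swirl", value=fl_math.cross(solution.coordinate, solution.velocity))

# Scalar solver variables participate in ordinary arithmetic as well.
mach_scaled = control.MachRef * 2    # Expression(expression="control.MachRef * 2")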
diff --git a/flow360/component/v1/updater.py b/flow360/component/v1/updater.py index 6fad8b64f..5934f0dfa 100644 --- a/flow360/component/v1/updater.py +++ b/flow360/component/v1/updater.py @@ -28,7 +28,7 @@ def _no_update(params_as_dict): ("25.2.3", "25.4.0", _no_update), ("25.4.0", "25.4.1", _no_update), ("25.4.1", "25.5.1", _no_update), - ("25.5.1", "25.6.0", _no_update), + ("25.5.1", "25.6.1", _no_update), ] diff --git a/flow360/version.py b/flow360/version.py index eb53cf57f..cc57c2270 100644 --- a/flow360/version.py +++ b/flow360/version.py @@ -2,5 +2,5 @@ version """ -__version__ = "25.6.0b1" +__version__ = "25.6.1b1" __solver_version__ = "release-25.6" diff --git a/pyproject.toml b/pyproject.toml index 36985a59d..838d7edcd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "flow360" -version = "v25.6.0b1" +version = "v25.6.1b1" description = "" authors = ["Flexcompute "] diff --git a/tests/data/case-70489f25-d6b7-4a0b-81e1-2fa2e82fc57b/simulation.json b/tests/data/case-70489f25-d6b7-4a0b-81e1-2fa2e82fc57b/simulation.json index 6a7a81368..dcf666254 100644 --- a/tests/data/case-70489f25-d6b7-4a0b-81e1-2fa2e82fc57b/simulation.json +++ b/tests/data/case-70489f25-d6b7-4a0b-81e1-2fa2e82fc57b/simulation.json @@ -561,7 +561,8 @@ "reference_geometry": { "area": { "units": "m**2", - "value": 70685.83470577035 + "value": 70685.83470577035, + "type_name":"number" }, "moment_center": { "units": "m", diff --git a/tests/ref/simulation/service_init_geometry.json b/tests/ref/simulation/service_init_geometry.json index 3674e3d4c..cf55c1dbd 100644 --- a/tests/ref/simulation/service_init_geometry.json +++ b/tests/ref/simulation/service_init_geometry.json @@ -1,5 +1,5 @@ { - "version": "25.6.0b1", + "version": "25.6.1b1", "unit_system": { "name": "SI" }, @@ -42,6 +42,7 @@ "units": "m" }, "area": { + "type_name":"number", "value": 1.0, "units": "m**2" } diff --git a/tests/ref/simulation/service_init_surface_mesh.json b/tests/ref/simulation/service_init_surface_mesh.json index 7aca9f8ef..65a2b6c35 100644 --- a/tests/ref/simulation/service_init_surface_mesh.json +++ b/tests/ref/simulation/service_init_surface_mesh.json @@ -1,5 +1,5 @@ { - "version": "25.6.0b1", + "version": "25.6.1b1", "unit_system": { "name": "SI" }, @@ -42,6 +42,7 @@ "units": "cm" }, "area": { + "type_name":"number", "value": 1.0, "units": "cm**2" } diff --git a/tests/ref/simulation/service_init_volume_mesh.json b/tests/ref/simulation/service_init_volume_mesh.json index 0fff1f52c..df7a1e933 100644 --- a/tests/ref/simulation/service_init_volume_mesh.json +++ b/tests/ref/simulation/service_init_volume_mesh.json @@ -1,5 +1,5 @@ { - "version": "25.6.0b1", + "version": "25.6.1b1", "unit_system": { "name": "SI" }, @@ -21,6 +21,7 @@ "units": "m" }, "area": { + "type_name":"number", "value": 1.0, "units": "m**2" } diff --git a/tests/simulation/converter/ref/ref_monitor.json b/tests/simulation/converter/ref/ref_monitor.json index bb2931d02..58a93f8f8 100644 --- a/tests/simulation/converter/ref/ref_monitor.json +++ b/tests/simulation/converter/ref/ref_monitor.json @@ -1 +1 @@ 
-{"version":"25.6.0b1","unit_system":{"name":"SI"},"meshing":null,"reference_geometry":null,"operating_condition":null,"models":[{"material":{"type":"air","name":"air","dynamic_viscosity":{"reference_viscosity":{"value":0.00001716,"units":"Pa*s"},"reference_temperature":{"value":273.15,"units":"K"},"effective_temperature":{"value":110.4,"units":"K"}}},"initial_condition":{"type_name":"NavierStokesInitialCondition","constants":null,"rho":"rho","u":"u","v":"v","w":"w","p":"p"},"type":"Fluid","navier_stokes_solver":{"absolute_tolerance":1e-10,"relative_tolerance":0.0,"order_of_accuracy":2,"equation_evaluation_frequency":1,"linear_solver":{"max_iterations":30,"absolute_tolerance":null,"relative_tolerance":null},"private_attribute_dict":null,"CFL_multiplier":1.0,"kappa_MUSCL":-1.0,"numerical_dissipation_factor":1.0,"limit_velocity":false,"limit_pressure_density":false,"type_name":"Compressible","low_mach_preconditioner":false,"low_mach_preconditioner_threshold":null,"update_jacobian_frequency":4,"max_force_jac_update_physical_steps":0},"turbulence_model_solver":{"absolute_tolerance":1e-8,"relative_tolerance":0.0,"order_of_accuracy":2,"equation_evaluation_frequency":4,"linear_solver":{"max_iterations":20,"absolute_tolerance":null,"relative_tolerance":null},"private_attribute_dict":null,"CFL_multiplier":2.0,"type_name":"SpalartAllmaras","reconstruction_gradient_limiter":0.5,"quadratic_constitutive_relation":false,"modeling_constants":{"type_name":"SpalartAllmarasConsts","C_DES":0.72,"C_d":8.0,"C_cb1":0.1355,"C_cb2":0.622,"C_sigma":0.6666666666666666,"C_v1":7.1,"C_vonKarman":0.41,"C_w2":0.3,"C_t3":1.2,"C_t4":0.5,"C_min_rd":10.0},"update_jacobian_frequency":4,"max_force_jac_update_physical_steps":0,"hybrid_model":null,"rotation_correction":false, "controls":null},"transition_model_solver":{"type_name":"None"}}],"time_stepping":{"type_name":"Steady","max_steps":2000,"CFL":{"type":"adaptive","min":0.1,"max":10000.0,"max_relative_change":1.0,"convergence_limiting_factor":0.25}},"user_defined_dynamics":null,"user_defined_fields":[],"outputs":[{"name":"R1","entities":{"stored_entities":[{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"b9de2bce-36c1-4bbf-af0a-2c6a2ab713a4","name":"Point-0","location":{"value":[2.694298,0.0,1.0195910000000001],"units":"m"}}]},"output_fields":{"items":["primitiveVars"]},"output_type":"ProbeOutput"},{"name":"V3","entities":{"stored_entities":[{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"a79cffc0-31d0-499d-906c-f271c2320166","name":"Point-1","location":{"value":[4.007,0.0,-0.31760000000000005],"units":"m"}},{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"8947eb10-fc59-4102-b9c7-168a91ca22b9","name":"Point-2","location":{"value":[4.007,0.0,-0.29760000000000003],"units":"m"}},{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"27ac4e03-592b-4dba-8fa1-8f6678087a96","name":"Point-3","location":{"value":[4.007,0.0,-0.2776],"units":"m"}}]},"output_fields":{"items":["mut"]},"output_type":"ProbeOutput"}],"private_attribute_asset_cache":{"project_length_unit":null,"project_entity_info":null, "use_inhouse_mesher": false, "use_geometry_AI": false}} 
+{"version":"25.6.1b1","unit_system":{"name":"SI"},"meshing":null,"reference_geometry":null,"operating_condition":null,"models":[{"material":{"type":"air","name":"air","dynamic_viscosity":{"reference_viscosity":{"value":0.00001716,"units":"Pa*s"},"reference_temperature":{"value":273.15,"units":"K"},"effective_temperature":{"value":110.4,"units":"K"}}},"initial_condition":{"type_name":"NavierStokesInitialCondition","constants":null,"rho":"rho","u":"u","v":"v","w":"w","p":"p"},"type":"Fluid","navier_stokes_solver":{"absolute_tolerance":1e-10,"relative_tolerance":0.0,"order_of_accuracy":2,"equation_evaluation_frequency":1,"linear_solver":{"max_iterations":30,"absolute_tolerance":null,"relative_tolerance":null},"private_attribute_dict":null,"CFL_multiplier":1.0,"kappa_MUSCL":-1.0,"numerical_dissipation_factor":1.0,"limit_velocity":false,"limit_pressure_density":false,"type_name":"Compressible","low_mach_preconditioner":false,"low_mach_preconditioner_threshold":null,"update_jacobian_frequency":4,"max_force_jac_update_physical_steps":0},"turbulence_model_solver":{"absolute_tolerance":1e-8,"relative_tolerance":0.0,"order_of_accuracy":2,"equation_evaluation_frequency":4,"linear_solver":{"max_iterations":20,"absolute_tolerance":null,"relative_tolerance":null},"private_attribute_dict":null,"CFL_multiplier":2.0,"type_name":"SpalartAllmaras","reconstruction_gradient_limiter":0.5,"quadratic_constitutive_relation":false,"modeling_constants":{"type_name":"SpalartAllmarasConsts","C_DES":0.72,"C_d":8.0,"C_cb1":0.1355,"C_cb2":0.622,"C_sigma":0.6666666666666666,"C_v1":7.1,"C_vonKarman":0.41,"C_w2":0.3,"C_t3":1.2,"C_t4":0.5,"C_min_rd":10.0},"update_jacobian_frequency":4,"max_force_jac_update_physical_steps":0,"hybrid_model":null,"rotation_correction":false, "controls":null},"transition_model_solver":{"type_name":"None"}}],"time_stepping":{"type_name":"Steady","max_steps":2000,"CFL":{"type":"adaptive","min":0.1,"max":10000.0,"max_relative_change":1.0,"convergence_limiting_factor":0.25}},"user_defined_dynamics":null,"user_defined_fields":[],"outputs":[{"name":"R1","entities":{"stored_entities":[{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"b9de2bce-36c1-4bbf-af0a-2c6a2ab713a4","name":"Point-0","location":{"value":[2.694298,0.0,1.0195910000000001],"units":"m"}}]},"output_fields":{"items":["primitiveVars"]},"output_type":"ProbeOutput"},{"name":"V3","entities":{"stored_entities":[{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"a79cffc0-31d0-499d-906c-f271c2320166","name":"Point-1","location":{"value":[4.007,0.0,-0.31760000000000005],"units":"m"}},{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"8947eb10-fc59-4102-b9c7-168a91ca22b9","name":"Point-2","location":{"value":[4.007,0.0,-0.29760000000000003],"units":"m"}},{"private_attribute_registry_bucket_name":"PointEntityType","private_attribute_entity_type_name":"Point","private_attribute_id":"27ac4e03-592b-4dba-8fa1-8f6678087a96","name":"Point-3","location":{"value":[4.007,0.0,-0.2776],"units":"m"}}]},"output_fields":{"items":["mut"]},"output_type":"ProbeOutput"}],"private_attribute_asset_cache":{"project_length_unit":null,"project_entity_info":null, "use_inhouse_mesher": false, "project_variables":null, "use_geometry_AI": false}} diff --git a/tests/simulation/data/simulation.json b/tests/simulation/data/simulation.json new 
file mode 100644 index 000000000..63e0422bb --- /dev/null +++ b/tests/simulation/data/simulation.json @@ -0,0 +1,1394 @@ +{ + "version": "25.5.0b4", + "unit_system": { + "name": "SI" + }, + "meshing": { + "refinement_factor": 1, + "gap_treatment_strength": 0, + "defaults": { + "surface_edge_growth_rate": 1.2, + "boundary_layer_growth_rate": 1.2, + "boundary_layer_first_layer_thickness": { + "value": 0.001, + "units": "m" + }, + "planar_face_tolerance": 0.000001, + "surface_max_edge_length": { + "value": 1, + "units": "m" + }, + "curvature_resolution_angle": { + "value": 12, + "units": "degree" + } + }, + "refinements": [], + "volume_zones": [ + { + "type": "AutomatedFarfield", + "name": "Automated Farfield", + "method": "auto" + } + ] + }, + "reference_geometry": { + "moment_center": { + "type_name": "number", + "value": [ + 0, + 0, + 0 + ], + "units": "m" + }, + "moment_length": { + "value": [ + 1, + 1, + 1 + ], + "units": "m" + }, + "area": { + "type_name": "expression", + "expression": "x * u.m**2", + "evaluated_value": 1, + "evaluated_units": "m**2" + } + }, + "operating_condition": { + "type_name": "AerospaceCondition", + "private_attribute_constructor": "default", + "private_attribute_input_cache": { + "alpha": { + "value": 5, + "units": "degree" + }, + "beta": { + "value": 0, + "units": "degree" + }, + "thermal_state": { + "type_name": "ThermalState", + "private_attribute_constructor": "default", + "private_attribute_input_cache": {}, + "temperature": { + "value": 288.15, + "units": "K" + }, + "density": { + "value": 1.225, + "units": "kg/m**3" + }, + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + } + } + }, + "alpha": { + "value": 5, + "units": "degree" + }, + "beta": { + "value": 0, + "units": "degree" + }, + "velocity_magnitude": { + "value": 100, + "units": "m/s" + }, + "thermal_state": { + "type_name": "ThermalState", + "private_attribute_constructor": "default", + "private_attribute_input_cache": {}, + "temperature": { + "value": 288.15, + "units": "K" + }, + "density": { + "value": 1.225, + "units": "kg/m**3" + }, + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + } + } + }, + "models": [ + { + "type": "Wall", + "entities": { + "stored_entities": [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "leftWing", + "name": "leftWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00001", + "body00001_face00002", + "body00001_face00003", + "body00001_face00004" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "fuselage", + "name": "fuselage", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00005", + "body00001_face00006", + "body00001_face00007", + "body00001_face00008", + "body00001_face00009", + "body00001_face00010" + ], + "private_attribute_potential_issues": [] 
+ }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "rightWing", + "name": "rightWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00011", + "body00001_face00012", + "body00001_face00013", + "body00001_face00014" + ], + "private_attribute_potential_issues": [] + } + ] + }, + "name": "Wall", + "use_wall_function": false, + "heat_spec": { + "value": { + "value": 0, + "units": "W/m**2" + }, + "type_name": "HeatFlux" + }, + "roughness_height": { + "value": 0, + "units": "m" + } + }, + { + "type": "Freestream", + "entities": { + "stored_entities": [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "GhostSphere", + "private_attribute_id": "farfield", + "name": "farfield", + "center": [ + 5.0007498695, + 0, + 0 + ], + "max_radius": 504.16453591327473 + } + ] + }, + "name": "Freestream" + }, + { + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + }, + "initial_condition": { + "type_name": "NavierStokesInitialCondition", + "rho": "rho", + "u": "u", + "v": "v", + "w": "w", + "p": "p" + }, + "type": "Fluid", + "navier_stokes_solver": { + "absolute_tolerance": 1e-10, + "relative_tolerance": 0, + "order_of_accuracy": 2, + "equation_evaluation_frequency": 1, + "linear_solver": { + "max_iterations": 30 + }, + "CFL_multiplier": 1, + "kappa_MUSCL": -1, + "numerical_dissipation_factor": 1, + "limit_velocity": false, + "limit_pressure_density": false, + "type_name": "Compressible", + "low_mach_preconditioner": false, + "update_jacobian_frequency": 4, + "max_force_jac_update_physical_steps": 0 + }, + "turbulence_model_solver": { + "absolute_tolerance": 1e-8, + "relative_tolerance": 0, + "order_of_accuracy": 2, + "equation_evaluation_frequency": 4, + "linear_solver": { + "max_iterations": 20 + }, + "CFL_multiplier": 2, + "type_name": "SpalartAllmaras", + "reconstruction_gradient_limiter": 0.5, + "quadratic_constitutive_relation": false, + "modeling_constants": { + "type_name": "SpalartAllmarasConsts", + "C_DES": 0.72, + "C_d": 8, + "C_cb1": 0.1355, + "C_cb2": 0.622, + "C_sigma": 0.6666666666666666, + "C_v1": 7.1, + "C_vonKarman": 0.41, + "C_w2": 0.3, + "C_t3": 1.2, + "C_t4": 0.5, + "C_min_rd": 10 + }, + "update_jacobian_frequency": 4, + "max_force_jac_update_physical_steps": 0, + "rotation_correction": false + }, + "transition_model_solver": { + "type_name": "None" + } + } + ], + "time_stepping": { + "type_name": "Steady", + "max_steps": 1000, + "CFL": { + "type": "adaptive", + "min": 0.1, + "max": 10000, + "max_relative_change": 1, + "convergence_limiting_factor": 0.25 + } + }, + "user_defined_fields": [], + "outputs": [ + { + "output_fields": { + "items": [ + "Cp", + "Cf", + "yPlus", + "CfVec" + ] + }, + "frequency": -1, + "frequency_offset": 0, + "output_format": "paraview", + "name": "Surface output", + "entities": { + "stored_entities": [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "leftWing", + "name": "leftWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00001", + "body00001_face00002", + 
"body00001_face00003", + "body00001_face00004" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "fuselage", + "name": "fuselage", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00005", + "body00001_face00006", + "body00001_face00007", + "body00001_face00008", + "body00001_face00009", + "body00001_face00010" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "rightWing", + "name": "rightWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00011", + "body00001_face00012", + "body00001_face00013", + "body00001_face00014" + ], + "private_attribute_potential_issues": [] + } + ] + }, + "write_single_file": false, + "output_type": "SurfaceOutput" + } + ], + "private_attribute_asset_cache": { + "project_length_unit": { + "value": 1, + "units": "m" + }, + "project_entity_info": { + "draft_entities": [], + "ghost_entities": [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "GhostSphere", + "private_attribute_id": "farfield", + "name": "farfield", + "center": [ + 5.0007498695, + 0, + 0 + ], + "max_radius": 504.16453591327473 + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "GhostCircularPlane", + "private_attribute_id": "symmetric-1", + "name": "symmetric-1", + "center": [ + 5.0007498695, + -5.0416453591327475, + 0 + ], + "max_radius": 10.083290718265495, + "normal_axis": [ + 0, + 1, + 0 + ] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "GhostCircularPlane", + "private_attribute_id": "symmetric-2", + "name": "symmetric-2", + "center": [ + 5.0007498695, + 5.0416453591327475, + 0 + ], + "max_radius": 10.083290718265495, + "normal_axis": [ + 0, + 1, + 0 + ] + } + ], + "type_name": "GeometryEntityInfo", + "body_ids": [ + "body00001" + ], + "body_attribute_names": [ + "bodyId", + "groupByFile" + ], + "grouped_bodies": [ + [ + { + "private_attribute_registry_bucket_name": "GeometryBodyGroupEntityType", + "private_attribute_entity_type_name": "GeometryBodyGroup", + "private_attribute_id": "body00001", + "name": "body00001", + "private_attribute_tag_key": "bodyId", + "private_attribute_sub_components": [ + "body00001" + ], + "transformation": { + "type_name": "BodyGroupTransformation", + "origin": { + "value": [ + 0, + 0, + 0 + ], + "units": "m" + }, + "axis_of_rotation": [ + 1, + 0, + 0 + ], + "angle_of_rotation": { + "value": 0, + "units": "degree" + }, + "scale": [ + 1, + 1, + 1 + ], + "translation": { + "value": [ + 0, + 0, + 0 + ], + "units": "m" + } + } + } + ], + [ + { + "private_attribute_registry_bucket_name": "GeometryBodyGroupEntityType", + "private_attribute_entity_type_name": "GeometryBodyGroup", + "private_attribute_id": "geometry.csm", + "name": "geometry.csm", + "private_attribute_tag_key": "groupByFile", + "private_attribute_sub_components": [ + "body00001" + ], + "transformation": { + "type_name": "BodyGroupTransformation", + "origin": { + "value": [ + 0, + 0, + 0 + ], + "units": "m" + }, + "axis_of_rotation": [ + 1, + 0, + 0 + ], + "angle_of_rotation": { + "value": 0, + "units": "degree" + }, + 
"scale": [ + 1, + 1, + 1 + ], + "translation": { + "value": [ + 0, + 0, + 0 + ], + "units": "m" + } + } + } + ] + ], + "face_ids": [ + "body00001_face00001", + "body00001_face00002", + "body00001_face00003", + "body00001_face00004", + "body00001_face00005", + "body00001_face00006", + "body00001_face00007", + "body00001_face00008", + "body00001_face00009", + "body00001_face00010", + "body00001_face00011", + "body00001_face00012", + "body00001_face00013", + "body00001_face00014" + ], + "face_attribute_names": [ + "groupName", + "faceId" + ], + "grouped_faces": [ + [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "leftWing", + "name": "leftWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00001", + "body00001_face00002", + "body00001_face00003", + "body00001_face00004" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "fuselage", + "name": "fuselage", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00005", + "body00001_face00006", + "body00001_face00007", + "body00001_face00008", + "body00001_face00009", + "body00001_face00010" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "rightWing", + "name": "rightWing", + "private_attribute_tag_key": "groupName", + "private_attribute_sub_components": [ + "body00001_face00011", + "body00001_face00012", + "body00001_face00013", + "body00001_face00014" + ], + "private_attribute_potential_issues": [] + } + ], + [ + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00001", + "name": "body00001_face00001", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00001" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00002", + "name": "body00001_face00002", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00002" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00003", + "name": "body00001_face00003", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00003" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00004", + "name": "body00001_face00004", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00004" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00005", + "name": "body00001_face00005", + "private_attribute_tag_key": 
"faceId", + "private_attribute_sub_components": [ + "body00001_face00005" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00006", + "name": "body00001_face00006", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00006" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00007", + "name": "body00001_face00007", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00007" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00008", + "name": "body00001_face00008", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00008" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00009", + "name": "body00001_face00009", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00009" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00010", + "name": "body00001_face00010", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00010" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00011", + "name": "body00001_face00011", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00011" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00012", + "name": "body00001_face00012", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00012" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00013", + "name": "body00001_face00013", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00013" + ], + "private_attribute_potential_issues": [] + }, + { + "private_attribute_registry_bucket_name": "SurfaceEntityType", + "private_attribute_entity_type_name": "Surface", + "private_attribute_id": "body00001_face00014", + "name": "body00001_face00014", + "private_attribute_tag_key": "faceId", + "private_attribute_sub_components": [ + "body00001_face00014" + ], + "private_attribute_potential_issues": [] + } + ] + ], + "edge_ids": [ + "body00001_edge00001", + "body00001_edge00002", + "body00001_edge00004", + "body00001_edge00005", + "body00001_edge00006", + "body00001_edge00007", + 
"body00001_edge00008", + "body00001_edge00009", + "body00001_edge00010", + "body00001_edge00012", + "body00001_edge00013", + "body00001_edge00014", + "body00001_edge00015", + "body00001_edge00016", + "body00001_edge00017", + "body00001_edge00018", + "body00001_edge00019", + "body00001_edge00020", + "body00001_edge00021", + "body00001_edge00022", + "body00001_edge00023", + "body00001_edge00024", + "body00001_edge00025", + "body00001_edge00026", + "body00001_edge00027", + "body00001_edge00029", + "body00001_edge00030", + "body00001_edge00031", + "body00001_edge00032", + "body00001_edge00033" + ], + "edge_attribute_names": [ + "edgeName", + "edgeId" + ], + "grouped_edges": [ + [ + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "trailingEdge", + "name": "trailingEdge", + "private_attribute_tag_key": "edgeName", + "private_attribute_sub_components": [ + "body00001_edge00001", + "body00001_edge00005", + "body00001_edge00026", + "body00001_edge00030" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "leadingEdge", + "name": "leadingEdge", + "private_attribute_tag_key": "edgeName", + "private_attribute_sub_components": [ + "body00001_edge00007", + "body00001_edge00032" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00002", + "name": "body00001_edge00002", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00002" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00004", + "name": "body00001_edge00004", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00004" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00006", + "name": "body00001_edge00006", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00006" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00008", + "name": "body00001_edge00008", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00008" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00009", + "name": "body00001_edge00009", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00009" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00010", + "name": "body00001_edge00010", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00010" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00012", + "name": "body00001_edge00012", + "private_attribute_tag_key": "__standalone__", + 
"private_attribute_sub_components": [ + "body00001_edge00012" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00013", + "name": "body00001_edge00013", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00013" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00014", + "name": "body00001_edge00014", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00014" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00015", + "name": "body00001_edge00015", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00015" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00016", + "name": "body00001_edge00016", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00016" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00017", + "name": "body00001_edge00017", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00017" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00018", + "name": "body00001_edge00018", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00018" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00019", + "name": "body00001_edge00019", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00019" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00020", + "name": "body00001_edge00020", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00020" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00021", + "name": "body00001_edge00021", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00021" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00022", + "name": "body00001_edge00022", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00022" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00023", + "name": "body00001_edge00023", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + 
"body00001_edge00023" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00024", + "name": "body00001_edge00024", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00024" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00025", + "name": "body00001_edge00025", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00025" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00027", + "name": "body00001_edge00027", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00027" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00029", + "name": "body00001_edge00029", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00029" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00031", + "name": "body00001_edge00031", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00031" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00033", + "name": "body00001_edge00033", + "private_attribute_tag_key": "__standalone__", + "private_attribute_sub_components": [ + "body00001_edge00033" + ] + } + ], + [ + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00001", + "name": "body00001_edge00001", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00001" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00002", + "name": "body00001_edge00002", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00002" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00004", + "name": "body00001_edge00004", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00004" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00005", + "name": "body00001_edge00005", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00005" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00006", + "name": "body00001_edge00006", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00006" + ] + }, + { + "private_attribute_registry_bucket_name": 
"EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00007", + "name": "body00001_edge00007", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00007" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00008", + "name": "body00001_edge00008", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00008" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00009", + "name": "body00001_edge00009", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00009" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00010", + "name": "body00001_edge00010", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00010" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00012", + "name": "body00001_edge00012", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00012" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00013", + "name": "body00001_edge00013", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00013" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00014", + "name": "body00001_edge00014", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00014" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00015", + "name": "body00001_edge00015", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00015" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00016", + "name": "body00001_edge00016", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00016" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00017", + "name": "body00001_edge00017", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00017" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00018", + "name": "body00001_edge00018", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00018" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00019", + "name": 
"body00001_edge00019", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00019" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00020", + "name": "body00001_edge00020", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00020" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00021", + "name": "body00001_edge00021", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00021" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00022", + "name": "body00001_edge00022", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00022" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00023", + "name": "body00001_edge00023", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00023" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00024", + "name": "body00001_edge00024", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00024" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00025", + "name": "body00001_edge00025", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00025" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00026", + "name": "body00001_edge00026", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00026" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00027", + "name": "body00001_edge00027", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00027" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00029", + "name": "body00001_edge00029", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00029" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00030", + "name": "body00001_edge00030", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00030" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00031", + "name": "body00001_edge00031", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00031" + ] + }, + { + 
"private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00032", + "name": "body00001_edge00032", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00032" + ] + }, + { + "private_attribute_registry_bucket_name": "EdgeEntityType", + "private_attribute_entity_type_name": "Edge", + "private_attribute_id": "body00001_edge00033", + "name": "body00001_edge00033", + "private_attribute_tag_key": "edgeId", + "private_attribute_sub_components": [ + "body00001_edge00033" + ] + } + ] + ], + "body_group_tag": "groupByFile", + "face_group_tag": "groupName", + "edge_group_tag": "edgeName" + }, + "use_inhouse_mesher": false, + "use_geometry_AI": false, + "project_variables": [ + { + "name": "x", + "value": { + "type_name": "number", + "value": 1 + }, + "postProcessing": false + } + ] + } +} \ No newline at end of file diff --git a/tests/simulation/ref/simulation_with_project_variables.json b/tests/simulation/ref/simulation_with_project_variables.json new file mode 100644 index 000000000..fcf4c83e9 --- /dev/null +++ b/tests/simulation/ref/simulation_with_project_variables.json @@ -0,0 +1,283 @@ +{ + "version": "25.6.1b1", + "unit_system": { + "name": "SI" + }, + "operating_condition": { + "type_name": "AerospaceCondition", + "private_attribute_constructor": "default", + "private_attribute_input_cache": { + "alpha": { + "value": 0.0, + "units": "degree" + }, + "beta": { + "value": 0.0, + "units": "degree" + }, + "thermal_state": { + "type_name": "ThermalState", + "private_attribute_constructor": "default", + "private_attribute_input_cache": {}, + "temperature": { + "value": 288.15, + "units": "K" + }, + "density": { + "value": 1.225, + "units": "kg/m**3" + }, + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + } + } + }, + "alpha": { + "value": 0.0, + "units": "degree" + }, + "beta": { + "value": 0.0, + "units": "degree" + }, + "velocity_magnitude": { + "value": 10.0, + "units": "m/s" + }, + "thermal_state": { + "type_name": "ThermalState", + "private_attribute_constructor": "default", + "private_attribute_input_cache": {}, + "temperature": { + "value": 288.15, + "units": "K" + }, + "density": { + "value": 1.225, + "units": "kg/m**3" + }, + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + } + }, + "reference_velocity_magnitude": { + "value": 10.0, + "units": "m/s" + } + }, + "models": [ + { + "material": { + "type": "air", + "name": "air", + "dynamic_viscosity": { + "reference_viscosity": { + "value": 0.00001716, + "units": "Pa*s" + }, + "reference_temperature": { + "value": 273.15, + "units": "K" + }, + "effective_temperature": { + "value": 110.4, + "units": "K" + } + } + }, + "initial_condition": { + "type_name": "NavierStokesInitialCondition", + "rho": "rho", + "u": "u", + "v": "v", + "w": "w", + "p": "p" + }, + "type": "Fluid", + "navier_stokes_solver": { + "absolute_tolerance": 1e-10, + "relative_tolerance": 0.0, + "order_of_accuracy": 2, + "equation_evaluation_frequency": 1, + "linear_solver": { + 
"max_iterations": 30 + }, + "CFL_multiplier": 1.0, + "kappa_MUSCL": -1.0, + "numerical_dissipation_factor": 1.0, + "limit_velocity": false, + "limit_pressure_density": false, + "type_name": "Compressible", + "low_mach_preconditioner": false, + "update_jacobian_frequency": 4, + "max_force_jac_update_physical_steps": 0 + }, + "turbulence_model_solver": { + "absolute_tolerance": 1e-8, + "relative_tolerance": 0.0, + "order_of_accuracy": 2, + "equation_evaluation_frequency": 4, + "linear_solver": { + "max_iterations": 20 + }, + "CFL_multiplier": 2.0, + "type_name": "SpalartAllmaras", + "reconstruction_gradient_limiter": 0.5, + "quadratic_constitutive_relation": false, + "modeling_constants": { + "type_name": "SpalartAllmarasConsts", + "C_DES": 0.72, + "C_d": 8.0, + "C_cb1": 0.1355, + "C_cb2": 0.622, + "C_sigma": 0.6666666666666666, + "C_v1": 7.1, + "C_vonKarman": 0.41, + "C_w2": 0.3, + "C_t3": 1.2, + "C_t4": 0.5, + "C_min_rd": 10.0 + }, + "update_jacobian_frequency": 4, + "max_force_jac_update_physical_steps": 0, + "rotation_correction": false + }, + "transition_model_solver": { + "type_name": "None" + } + } + ], + "time_stepping": { + "type_name": "Steady", + "max_steps": 2000, + "CFL": { + "type": "adaptive", + "min": 0.1, + "max": 10000.0, + "max_relative_change": 1.0, + "convergence_limiting_factor": 0.25 + } + }, + "user_defined_fields": [], + "outputs": [ + { + "output_fields": { + "items": [ + { + "name": "bbb", + "type_name": "UserVariable" + } + ] + }, + "frequency": -1, + "frequency_offset": 0, + "output_format": "paraview", + "name": "Volume output", + "output_type": "VolumeOutput" + }, + { + "output_fields": { + "items": [ + { + "name": "bbb", + "type_name": "UserVariable" + } + ] + }, + "name": "Probe output", + "entities": { + "stored_entities": [ + { + "private_attribute_registry_bucket_name": "PointEntityType", + "private_attribute_entity_type_name": "Point", + "private_attribute_id": "111", + "name": "pt1", + "location": { + "value": [ + 1.0, + 2.0, + 3.0 + ], + "units": "m" + } + } + ] + }, + "output_type": "ProbeOutput" + } + ], + "private_attribute_asset_cache": { + "use_inhouse_mesher": false, + "use_geometry_AI": false, + "project_variables": [ + { + "name": "ccc", + "value": { + "type_name": "number", + "value": 12.0, + "units": "m/s" + }, + "postProcessing": false + }, + { + "name": "aaa", + "value": { + "type_name": "expression", + "expression": "[solution.velocity[0] + ccc, solution.velocity[1], solution.velocity[2]]", + "evaluated_value": [ + null, + null, + null + ], + "evaluated_units": "m/s" + }, + "postProcessing": false + }, + { + "name": "bbb", + "value": { + "type_name": "expression", + "expression": "[aaa[0] + 14 * u.m / u.s, aaa[1], aaa[2]]", + "evaluated_value": [ + null, + null, + null + ], + "evaluated_units": "m/s", + "output_units": "km/ms" + }, + "postProcessing": true + } + ] + } +} \ No newline at end of file diff --git a/tests/simulation/service/test_services_v2.py b/tests/simulation/service/test_services_v2.py index 6494e1788..fcbe30039 100644 --- a/tests/simulation/service/test_services_v2.py +++ b/tests/simulation/service/test_services_v2.py @@ -146,7 +146,7 @@ def test_validate_error(): "reference_geometry": { "moment_center": {"value": [0, 0, 0], "units": "m"}, "moment_length": {"value": 1.0, "units": "m"}, - "area": {"value": 1.0, "units": "m**2"}, + "area": {"value": 1.0, "units": "m**2", "type_name": "number"}, }, "time_stepping": { "type_name": "Steady", @@ -1065,7 +1065,7 @@ def test_forward_compatibility_error(): ) assert errors[0] == { - 
"type": "99.99.99 > 25.6.0b1", + "type": "99.99.99 > 25.6.1b1", "loc": [], "msg": "The cloud `SimulationParam` is too new for your local Python client. " "Errors may occur since forward compatibility is limited.", @@ -1079,7 +1079,7 @@ def test_forward_compatibility_error(): ) assert errors[0] == { - "type": "99.99.99 > 25.6.0b1", + "type": "99.99.99 > 25.6.1b1", "loc": [], "msg": "[Internal] Your `SimulationParams` is too new for the solver. Errors may occur since forward compatibility is limited.", "ctx": {}, diff --git a/tests/simulation/test_expressions.py b/tests/simulation/test_expressions.py new file mode 100644 index 000000000..3252d8b98 --- /dev/null +++ b/tests/simulation/test_expressions.py @@ -0,0 +1,1166 @@ +import json +import re +from typing import Annotated, Optional + +import numpy as np +import pydantic as pd +import pytest + +import flow360.component.simulation.user_code.core.context as context +from flow360 import ( + AerospaceCondition, + HeatEquationInitialCondition, + LiquidOperatingCondition, + SimulationParams, + Solid, + Unsteady, + math, + u, +) +from flow360.component.project_utils import save_user_variables +from flow360.component.simulation.framework.base_model import Flow360BaseModel +from flow360.component.simulation.framework.param_utils import AssetCache +from flow360.component.simulation.framework.updater_utils import compare_lists +from flow360.component.simulation.models.material import Water, aluminum +from flow360.component.simulation.outputs.output_entities import Point +from flow360.component.simulation.outputs.outputs import ( + ProbeOutput, + SurfaceOutput, + VolumeOutput, +) +from flow360.component.simulation.primitives import ( + GenericVolume, + ReferenceGeometry, + Surface, +) +from flow360.component.simulation.services import ( + ValidationCalledBy, + validate_expression, + validate_model, +) +from flow360.component.simulation.translator.solver_translator import ( + user_variable_to_udf, +) +from flow360.component.simulation.unit_system import ( + AbsoluteTemperatureType, + AngleType, + AngularVelocityType, + AreaType, + DensityType, + ForceType, + FrequencyType, + HeatFluxType, + HeatSourceType, + InverseAreaType, + InverseLengthType, + LengthType, + MassFlowRateType, + MassType, + MomentType, + PowerType, + PressureType, + SI_unit_system, + SpecificEnergyType, + SpecificHeatCapacityType, + ThermalConductivityType, + TimeType, + VelocityType, + ViscosityType, +) +from flow360.component.simulation.user_code.core.context import WHITELISTED_CALLABLES +from flow360.component.simulation.user_code.core.types import ( + Expression, + SolverVariable, + UserVariable, + ValueOrExpression, +) +from flow360.component.simulation.user_code.variables import control, solution +from tests.utils import to_file_from_file_test + + +@pytest.fixture(autouse=True) +def reset_context(): + context.default_context.clear() + + +@pytest.fixture(autouse=True) +def change_test_dir(request, monkeypatch): + monkeypatch.chdir(request.fspath.dirname) + + +@pytest.fixture() +def constant_variable(): + return UserVariable(name="constant_variable", value=10) + + +@pytest.fixture() +def constant_array(): + return UserVariable(name="constant_array", value=[10, 20]) + + +@pytest.fixture() +def constant_unyt_quantity(): + return UserVariable(name="constant_unyt_quantity", value=10 * u.m) + + +@pytest.fixture() +def constant_unyt_array(): + return UserVariable(name="constant_unyt_array", value=[10, 20] * u.m) + + +@pytest.fixture() +def solution_variable(): + return 
UserVariable(name="solution_variable", value=solution.velocity) + + +def test_variable_init(): + # Variables can be initialized with a... + + # Value + a = UserVariable(name="a", value=1) + + # Dimensioned value + b = UserVariable(name="b", value=1 * u.m) + + # Expression (possibly with other variable) + c = UserVariable(name="c", value=b + 1 * u.m) + + # A dictionary (can contain extra values - important for frontend) + d = UserVariable.model_validate({"name": "d", "value": 1, "extra": "foo"}) + + +def test_expression_init(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[float] = pd.Field() + + # Declare a variable + x = UserVariable(name="x", value=1) + + # Initialize with value + model_1 = TestModel(field=1) + assert isinstance(model_1.field, float) + assert model_1.field == 1 + assert str(model_1.field) == "1.0" + + # Initialize with variable + model_2 = TestModel(field=x) + assert isinstance(model_2.field, Expression) + assert model_2.field.evaluate() == 1 + assert str(model_2.field) == "x" + + # Initialize with variable and value + model_3 = TestModel(field=x + 1) + assert isinstance(model_3.field, Expression) + assert model_3.field.evaluate() == 2 + assert str(model_3.field) == "x + 1" + + # Initialize with another expression + model_4 = TestModel(field=model_3.field + 1) + assert isinstance(model_4.field, Expression) + assert model_4.field.evaluate() == 3 + assert str(model_4.field) == "x + 1 + 1" + + +def test_variable_reassignment(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[float] = pd.Field() + + # Declare a variable + x = UserVariable(name="x", value=1) + + model = TestModel(field=x) + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1 + assert str(model.field) == "x" + + # Change variable value + x.value = 2 + + assert model.field.evaluate() == 2 + + +def test_expression_operators(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[float] = pd.Field() + + # Declare two variables + x = UserVariable(name="x", value=3) + y = UserVariable(name="y", value=2) + + model = TestModel(field=x + y) + + # Addition + model.field = x + y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 5 + assert str(model.field) == "x + y" + + # Subtraction + model.field = x - y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1 + assert str(model.field) == "x - y" + + # Multiplication + model.field = x * y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 6 + assert str(model.field) == "x * y" + + # Division + model.field = x / y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1.5 + assert str(model.field) == "x / y" + + # Exponentiation + model.field = x**y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 9 + assert str(model.field) == "x ** y" + + # Modulus + model.field = x % y + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1 + assert str(model.field) == "x % y" + + # Negation + model.field = -x + assert isinstance(model.field, Expression) + assert model.field.evaluate() == -3 + assert str(model.field) == "-x" + + # Identity + model.field = +x + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 3 + assert str(model.field) == "+x" + + # Complex statement + model.field = ((x - 2 * x) + (x + y) / 2 - 2**x) % 4 + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 3.5 + assert 
str(model.field) == "(x - 2 * x + (x + y) / 2 - 2 ** x) % 4" + + +def test_dimensioned_expressions(): + class TestModel(Flow360BaseModel): + length: ValueOrExpression[LengthType] = pd.Field() + angle: ValueOrExpression[AngleType] = pd.Field() + mass: ValueOrExpression[MassType] = pd.Field() + time: ValueOrExpression[TimeType] = pd.Field() + absolute_temp: ValueOrExpression[AbsoluteTemperatureType] = pd.Field() + velocity: ValueOrExpression[VelocityType] = pd.Field() + area: ValueOrExpression[AreaType] = pd.Field() + force: ValueOrExpression[ForceType] = pd.Field() + pressure: ValueOrExpression[PressureType] = pd.Field() + density: ValueOrExpression[DensityType] = pd.Field() + viscosity: ValueOrExpression[ViscosityType] = pd.Field() + power: ValueOrExpression[PowerType] = pd.Field() + moment: ValueOrExpression[MomentType] = pd.Field() + angular_velocity: ValueOrExpression[AngularVelocityType] = pd.Field() + heat_flux: ValueOrExpression[HeatFluxType] = pd.Field() + heat_source: ValueOrExpression[HeatSourceType] = pd.Field() + specific_heat_capacity: ValueOrExpression[SpecificHeatCapacityType] = pd.Field() + thermal_conductivity: ValueOrExpression[ThermalConductivityType] = pd.Field() + inverse_area: ValueOrExpression[InverseAreaType] = pd.Field() + inverse_length: ValueOrExpression[InverseLengthType] = pd.Field() + mass_flow_rate: ValueOrExpression[MassFlowRateType] = pd.Field() + specific_energy: ValueOrExpression[SpecificEnergyType] = pd.Field() + frequency: ValueOrExpression[FrequencyType] = pd.Field() + + model_legacy = TestModel( + length=1 * u.m, + angle=1 * u.rad, + mass=1 * u.kg, + time=1 * u.s, + absolute_temp=1 * u.K, + velocity=1 * u.m / u.s, + area=1 * u.m**2, + force=1 * u.N, + pressure=1 * u.Pa, + density=1 * u.kg / u.m**3, + viscosity=1 * u.Pa * u.s, + power=1 * u.W, + moment=1 * u.N * u.m, + angular_velocity=1 * u.rad / u.s, + heat_flux=1 * u.W / u.m**2, + heat_source=1 * u.kg / u.m / u.s**3, + specific_heat_capacity=1 * u.J / u.kg / u.K, + thermal_conductivity=1 * u.W / u.m / u.K, + inverse_area=1 / u.m**2, + inverse_length=1 / u.m, + mass_flow_rate=1 * u.kg / u.s, + specific_energy=1 * u.J / u.kg, + frequency=1 * u.Hz, + ) + + assert model_legacy + + x = UserVariable(name="x", value=1) + + model_expression = TestModel( + length=x * u.m, + angle=x * u.rad, + mass=x * u.kg, + time=x * u.s, + absolute_temp=x * u.K, + velocity=x * u.m / u.s, + area=x * u.m**2, + force=x * u.N, + pressure=x * u.Pa, + density=x * u.kg / u.m**3, + viscosity=x * u.Pa * u.s, + power=x * u.W, + moment=x * u.N * u.m, + angular_velocity=x * u.rad / u.s, + heat_flux=x * u.W / u.m**2, + heat_source=x * u.kg / u.m / u.s**3, + specific_heat_capacity=x * u.J / u.kg / u.K, + thermal_conductivity=x * u.W / u.m / u.K, + inverse_area=x / u.m**2, + inverse_length=x / u.m, + mass_flow_rate=x * u.kg / u.s, + specific_energy=x * u.J / u.kg, + frequency=x * u.Hz, + ) + + assert model_expression + + +def test_constrained_scalar_type(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[Annotated[float, pd.Field(strict=True, ge=0)]] = pd.Field() + + x = UserVariable(name="x", value=1) + + model = TestModel(field=x) + + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1 + assert str(model.field) == "x" + + with pytest.raises(pd.ValidationError): + model.field = -x + + +def test_constrained_dimensioned_type(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[LengthType.Positive] = pd.Field() + + x = UserVariable(name="x", value=1) + + model = TestModel(field=x * 
u.m) + + assert isinstance(model.field, Expression) + assert model.field.evaluate() == 1 * u.m + assert str(model.field) == "x * u.m" + + with pytest.raises(pd.ValidationError): + model.field = -x * u.m + + +def test_vector_types(): + class TestModel(Flow360BaseModel): + vector: ValueOrExpression[LengthType.Vector] = pd.Field() + axis: ValueOrExpression[LengthType.Axis] = pd.Field() + array: ValueOrExpression[LengthType.Array] = pd.Field() + direction: ValueOrExpression[LengthType.Direction] = pd.Field() + moment: ValueOrExpression[LengthType.Moment] = pd.Field() + + x = UserVariable(name="x", value=[1, 0, 0] * u.m) + y = UserVariable(name="y", value=[0, 0, 0] * u.m) + z = UserVariable(name="z", value=[1, 0, 0, 0] * u.m) + w = UserVariable(name="w", value=[1, 1, 1] * u.m) + + model = TestModel(vector=y, axis=x, array=z, direction=x, moment=w) + + assert isinstance(model.vector, Expression) + assert (model.vector.evaluate() == [0, 0, 0] * u.m).all() + assert str(model.vector) == "y" + + assert isinstance(model.axis, Expression) + assert (model.axis.evaluate() == [1, 0, 0] * u.m).all() + assert str(model.axis) == "x" + + assert isinstance(model.array, Expression) + assert (model.array.evaluate() == [1, 0, 0, 0] * u.m).all() + assert str(model.array) == "z" + + assert isinstance(model.direction, Expression) + assert (model.direction.evaluate() == [1, 0, 0] * u.m).all() + assert str(model.direction) == "x" + + assert isinstance(model.moment, Expression) + assert (model.moment.evaluate() == [1, 1, 1] * u.m).all() + assert str(model.moment) == "w" + + with pytest.raises(pd.ValidationError): + model.vector = z + + with pytest.raises(pd.ValidationError): + model.axis = y + + with pytest.raises(pd.ValidationError): + model.direction = y + + with pytest.raises(pd.ValidationError): + model.moment = x + + +def test_solver_builtin(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[float] = pd.Field() + + x = UserVariable(name="x", value=4) + + model = TestModel(field=x * u.m + solution.y_plus * u.cm) + + assert str(model.field) == "x * u.m + solution.y_plus * u.cm" + + # Raises when trying to evaluate with a message about this variable being blacklisted + with pytest.raises(ValueError): + model.field.evaluate() + + +def test_serializer( + constant_variable, + constant_array, + constant_unyt_quantity, + constant_unyt_array, + solution_variable, +): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[VelocityType] = pd.Field() + non_dim_field: Optional[ValueOrExpression[float]] = pd.Field(default=None) + + x = UserVariable(name="x", value=4) + cp = UserVariable(name="my_cp", value=solution.Cp) + + model = TestModel(field=x * u.m / u.s + 4 * x**2 * u.m / u.s, non_dim_field=cp) + + assert str(model.field) == "x * u.m / u.s + 4 * x ** 2 * u.m / u.s" + + serialized = model.model_dump() + + assert serialized["field"]["type_name"] == "expression" + assert serialized["field"]["expression"] == "x * u.m / u.s + 4 * x ** 2 * u.m / u.s" + assert serialized["non_dim_field"]["expression"] == "my_cp" + assert serialized["non_dim_field"]["evaluated_value"] == None # Not NaN anymore + + model = TestModel(field=4 * u.m / u.s) + + serialized = model.model_dump(exclude_none=True) + + assert serialized["field"]["type_name"] == "number" + assert serialized["field"]["value"] == 4 + assert serialized["field"]["units"] == "m/s" + + assert constant_variable.model_dump() == { + "name": "constant_variable", + "type_name": "UserVariable", + } + + assert constant_array.model_dump() == { + "name": 
"constant_array", + "type_name": "UserVariable", + } + assert constant_unyt_quantity.model_dump() == { + "name": "constant_unyt_quantity", + "type_name": "UserVariable", + } + + assert constant_unyt_array.model_dump() == { + "name": "constant_unyt_array", + "type_name": "UserVariable", + } + + assert solution_variable.model_dump() == { + "name": "solution_variable", + "type_name": "UserVariable", + } + + +def test_deserializer( + constant_unyt_quantity, + constant_unyt_array, + constant_variable, + constant_array, + solution_variable, +): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[VelocityType] = pd.Field() + + x = UserVariable(name="x", value=4) + + model = { + "type_name": "expression", + "expression": "x * u.m / u.s + 4 * x ** 2 * u.m / u.s", + "evaluated_value": 68.0, + "evaluated_units": "m/s", + } + + deserialized = TestModel(field=model) + + assert str(deserialized.field) == "x * u.m / u.s + 4 * x ** 2 * u.m / u.s" + + model = {"type_name": "number", "value": 4.0, "units": "m/s"} + + deserialized = TestModel(field=model) + + assert str(deserialized.field) == "4.0 m/s" + + # Constant unyt quantity + model = { + "name": "constant_unyt_quantity", + "value": { + "evaluated_units": None, + "evaluated_value": None, + "expression": None, + "output_units": None, + "type_name": "number", + "units": "m", + "value": 10.0, + }, + } + deserialized = UserVariable.model_validate(model) + assert deserialized == constant_unyt_quantity + + # Constant unyt array + model = { + "name": "constant_unyt_array", + "value": { + "evaluated_units": None, + "evaluated_value": None, + "expression": None, + "output_units": None, + "type_name": "number", + "units": "m", + "value": [10, 20], + }, + } + deserialized = UserVariable.model_validate(model) + assert deserialized == constant_unyt_array + + # Constant quantity + model = { + "name": "constant_variable", + "value": { + "evaluated_units": None, + "evaluated_value": None, + "expression": None, + "output_units": None, + "type_name": "number", + "units": None, + "value": 10.0, + }, + } + deserialized = UserVariable.model_validate(model) + assert deserialized == constant_variable + + # Constant array + model = { + "name": "constant_array", + "value": { + "evaluated_units": None, + "evaluated_value": None, + "expression": None, + "output_units": None, + "type_name": "number", + "units": None, + "value": [10, 20], + }, + } + deserialized = UserVariable.model_validate(model) + assert deserialized == constant_array + + # Solver variable (NaN-None handling) + model = { + "name": "solution_variable", + "value": { + "evaluated_units": "m/s", + "evaluated_value": [None, None, None], + "expression": "solution.velocity", + "output_units": None, + "type_name": "expression", + "units": None, + "value": None, + }, + } + deserialized = UserVariable.model_validate(model) + assert deserialized == solution_variable + assert all( + np.isnan(item) + for item in deserialized.value.evaluate(raise_on_non_evaluable=False, force_evaluate=True) + ) + + +def test_subscript_access(): + class ScalarModel(Flow360BaseModel): + scalar: ValueOrExpression[float] = pd.Field() + + x = UserVariable(name="x", value=[2, 3, 4]) + + model = ScalarModel(scalar=x[0] + x[1] + x[2] + 1) + + assert str(model.scalar) == "x[0] + x[1] + x[2] + 1" + + assert model.scalar.evaluate() == 10 + + model = ScalarModel(scalar="x[0] + x[1] + x[2] + 1") + + assert str(model.scalar) == "x[0] + x[1] + x[2] + 1" + + assert model.scalar.evaluate() == 10 + + +def test_error_message(): + class 
TestModel(Flow360BaseModel): + field: ValueOrExpression[VelocityType] = pd.Field() + + x = UserVariable(name="x", value=4) + + try: + TestModel(field="1 + nonexisting * 1") + except pd.ValidationError as err: + validation_errors = err.errors() + + assert len(validation_errors) >= 1 + assert validation_errors[0]["type"] == "value_error" + assert "Name 'nonexisting' is not defined" in validation_errors[0]["msg"] + + try: + TestModel(field="1 + x * 1") + except pd.ValidationError as err: + validation_errors = err.errors() + + assert len(validation_errors) >= 1 + assert validation_errors[0]["type"] == "value_error" + assert "does not match (length)/(time) dimension" in validation_errors[0]["msg"] + + try: + TestModel(field="1 * 1 +") + except pd.ValidationError as err: + validation_errors = err.errors() + + assert len(validation_errors) >= 1 + assert validation_errors[0]["type"] == "value_error" + assert "invalid syntax" in validation_errors[0]["msg"] + assert "1 * 1 +" in validation_errors[0]["msg"] + assert "line" in validation_errors[0]["ctx"] + assert "column" in validation_errors[0]["ctx"] + assert validation_errors[0]["ctx"]["column"] == 8 + + try: + TestModel(field="1 * 1 +* 2") + except pd.ValidationError as err: + validation_errors = err.errors() + + assert len(validation_errors) >= 1 + assert validation_errors[0]["type"] == "value_error" + assert "invalid syntax" in validation_errors[0]["msg"] + assert "1 * 1 +* 2" in validation_errors[0]["msg"] + assert "line" in validation_errors[0]["ctx"] + assert "column" in validation_errors[0]["ctx"] + assert validation_errors[0]["ctx"]["column"] == 8 + + try: + TestModel(field="1 * 1 + (2") + except pd.ValidationError as err: + validation_errors = err.errors() + + assert len(validation_errors) == 1 + assert validation_errors[0]["type"] == "value_error" + assert "line" in validation_errors[0]["ctx"] + assert "column" in validation_errors[0]["ctx"] + assert validation_errors[0]["ctx"]["column"] in ( + 9, + 11, + ) # Python 3.9 report error on col 11, error message is also different + + with pytest.raises( + ValueError, + match=re.escape( + "Vector operation (__add__ between solution.velocity and [1 2 3] cm/ms) not supported for variables. Please write expression for each component." + ), + ): + UserVariable(name="x", value=solution.velocity + [1, 2, 3] * u.cm / u.ms) + + errors, _, _ = validate_expression( + variables=[], expressions=["solution.velocity + [1, 2, 3] * u.cm / u.ms"] + ) + assert len(errors) == 1 + assert errors[0]["type"] == "value_error" + assert ( + "Vector operation (__add__ between solution.velocity and [1 2 3] cm/ms) not supported for variables. Please write expression for each component." 
+ in errors[0]["msg"] + ) + + +def test_solver_translation(): + timestepping_unsteady = Unsteady(steps=12, step_size=0.1 * u.s) + solid_model = Solid( + volumes=[GenericVolume(name="CHTSolid")], + material=aluminum, + volumetric_heat_source="0", + initial_condition=HeatEquationInitialCondition(temperature="10"), + ) + surface_output_with_residual_heat_solver = SurfaceOutput( + name="surface", + surfaces=[Surface(name="noSlipWall")], + write_single_file=True, + output_fields=["residualHeatSolver"], + ) + water = Water( + name="h2o", density=1000 * u.kg / u.m**3, dynamic_viscosity=0.001 * u.kg / u.m / u.s + ) + liquid_operating_condition = LiquidOperatingCondition( + velocity_magnitude=50 * u.m / u.s, + reference_velocity_magnitude=100 * u.m / u.s, + material=water, + ) + + # Valid simulation params + with SI_unit_system: + params = SimulationParams( + models=[solid_model], + operating_condition=liquid_operating_condition, + time_stepping=timestepping_unsteady, + outputs=[surface_output_with_residual_heat_solver], + private_attribute_asset_cache=AssetCache(project_length_unit=2 * u.m), + ) + + x = UserVariable(name="x", value=4) + y = UserVariable(name="y", value=x + 1) + + # Showcased features: + expression = Expression.model_validate(x * u.m**2) + + # 1. Units are converted to flow360 unit system using the provided params (1m**2 -> 0.25 because of length unit) + # 2. User variables are inlined (for numeric value types) + assert expression.to_solver_code(params) == "(4.0 * pow(0.5, 2))" + + # 3. User variables are inlined (for expression value types) + expression = Expression.model_validate(y * u.m**2) + assert expression.to_solver_code(params) == "(5.0 * pow(0.5, 2))" + + # 4. For solver variables, the units are stripped (assumed to be in solver units so factor == 1.0) + expression = Expression.model_validate(y * u.m / u.s + control.MachRef) + assert expression.to_solver_code(params) == "(((5.0 * 0.5) / 500.0) + machRef)" + + +def test_cyclic_dependencies(): + x = UserVariable(name="x", value=4) + y = UserVariable(name="y", value=x) + + # If we try to create a cyclic dependency we throw a validation error + # The error contains info about the cyclic dependency, so here its x -> y -> x + with pytest.raises(pd.ValidationError): + x.value = y + + z = UserVariable(name="z", value=4) + + with pytest.raises(pd.ValidationError): + z.value = z + + +def test_auto_alias(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[VelocityType] = pd.Field() + + x = UserVariable(name="x", value=4) + + unaliased = { + "type_name": "expression", + "expression": "(x * u.m) / u.s + (((4 * (x ** 2)) * u.m) / u.s)", + "evaluated_value": 68.0, + "evaluated_units": "m/s", + } + + aliased = { + "typeName": "expression", + "expression": "(x * u.m) / u.s + (((4 * (x ** 2)) * u.m) / u.s)", + "evaluatedValue": 68.0, + "evaluatedUnits": "m/s", + } + + model_1 = TestModel(field=unaliased) + model_2 = TestModel(field=aliased) + + assert str(model_1.field) == "x * u.m / u.s + 4 * x ** 2 * u.m / u.s" + assert str(model_2.field) == "x * u.m / u.s + 4 * x ** 2 * u.m / u.s" + + +def test_variable_space_init(): + # Simulating loading a SimulationParams object from file - ensure that the variable space is loaded correctly + with open("data/simulation.json", "r+") as fh: + data = json.load(fh) + + params, errors, _ = validate_model( + params_as_dict=data, validated_by=ValidationCalledBy.LOCAL, root_item_type="Geometry" + ) + + assert errors is None + evaluated = params.reference_geometry.area.evaluate() + + assert 
evaluated == 1.0 * u.m**2 + + +def test_cross_product(): + class TestModel(Flow360BaseModel): + field: ValueOrExpression[VelocityType.Vector] = pd.Field() + + x = UserVariable(name="x", value=[1, 2, 3]) + + model = TestModel(field=math.cross(x, [3, 2, 1]) * u.m / u.s) + assert ( + str(model.field) + == "[x[1] * 1 - x[2] * 2, x[2] * 3 - x[0] * 1, x[0] * 2 - x[1] * 3] * u.m / u.s" + ) + + assert (model.field.evaluate() == [-4, 8, -4] * u.m / u.s).all() + + model = TestModel(field="math.cross(x, [3, 2, 1]) * u.m / u.s") + assert str(model.field) == "math.cross(x, [3, 2, 1]) * u.m / u.s" + + result = model.field.evaluate() + assert (result == [-4, 8, -4] * u.m / u.s).all() + + +def test_vector_solver_variable_cross_product_translation(): + with open("data/simulation.json", "r+") as fh: + data = json.load(fh) + + params, errors, _ = validate_model( + params_as_dict=data, validated_by=ValidationCalledBy.LOCAL, root_item_type="Geometry" + ) + + class TestModel(Flow360BaseModel): + field: ValueOrExpression[LengthType.Vector] = pd.Field() + + # From string + expr_1 = TestModel(field="math.cross([1, 2, 3], [1, 2, 3] * u.m)").field + assert str(expr_1) == "math.cross([1, 2, 3], [1, 2, 3] * u.m)" + + # During solver translation both options are inlined the same way through partial evaluation + expr_1.to_solver_code(params) + + # From python code + expr_2 = TestModel(field=math.cross([1, 2, 3], solution.coordinate)).field + assert ( + str(expr_2) == "[2 * solution.coordinate[2] - 3 * solution.coordinate[1], " + "3 * solution.coordinate[0] - 1 * solution.coordinate[2], " + "1 * solution.coordinate[1] - 2 * solution.coordinate[0]]" + ) + + # During solver translation both options are inlined the same way through partial evaluation + expr_2.to_solver_code(params) + + +def test_cross_function_use_case(): + + with SI_unit_system: + params = SimulationParams( + private_attribute_asset_cache=AssetCache(project_length_unit=10 * u.m) + ) + + print("\n1 Python mode\n") + a = UserVariable(name="a", value=math.cross([3, 2, 1] * u.m, solution.coordinate)) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1], " + "1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2], " + "3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({(((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])), (((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])), (((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0]))})" + ) + + print("\n1.1 Python mode but arg swapped\n") + a.value = math.cross(solution.coordinate, [3, 2, 1] * u.m) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[solution.coordinate[1] * 1 * u.m - solution.coordinate[2] * 2 * u.m, " + "solution.coordinate[2] * 3 * u.m - solution.coordinate[0] * 1 * u.m, " + "solution.coordinate[0] * 2 * u.m - solution.coordinate[1] * 3 * u.m]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({(((coordinate[1] * 1) * 0.1) - ((coordinate[2] * 2) * 0.1)), (((coordinate[2] * 3) * 0.1) - ((coordinate[0] * 1) * 0.1)), (((coordinate[0] * 2) * 0.1) - ((coordinate[1] * 3) * 0.1))})" + ) + + print("\n2 Taking advantage of unyt as much as possible\n") + a.value = math.cross([3, 2, 1] * u.m, [2, 2, 1] * u.m) + assert all(a.value == [0, -1, 2] * u.m * u.m) + + print("\n3 (Units defined in components)\n") + 
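# Case 3 mirrors case 2, but with units attached to each component (a plain Python list of unyt quantities) instead of a single unyt array; the assert below checks that math.cross still reduces this to concrete quantities. +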
a.value = math.cross([3 * u.m, 2 * u.m, 1 * u.m], [2 * u.m, 2 * u.m, 1 * u.m]) + assert a.value == [0 * u.m * u.m, -1 * u.m * u.m, 2 * u.m * u.m] + + print("\n4 Serialized version\n") + a.value = "math.cross([3, 2, 1] * u.m, solution.coordinate)" + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1], " + "1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2], " + "3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({(((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])), (((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])), (((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0]))})" + ) + + print("\n5 Recursive cross in Python mode\n") + a.value = math.cross(math.cross([3, 2, 1] * u.m, solution.coordinate), [3, 2, 1] * u.m) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[(1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 1 * u.m - (3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 2 * u.m, " + "(3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 3 * u.m - (2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 1 * u.m, " + "(2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 2 * u.m - (1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 3 * u.m]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({((((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 1) * 0.1) - (((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 2) * 0.1)), ((((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 3) * 0.1) - (((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 1) * 0.1)), ((((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 2) * 0.1) - (((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 3) * 0.1))})" + ) + + print("\n6 Recursive cross in String mode\n") + a.value = "math.cross(math.cross([3, 2, 1] * u.m, solution.coordinate), [3, 2, 1] * u.m)" + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert ( + str(res) + == "[(1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 1 * u.m - (3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 2 * u.m, " + "(3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 3 * u.m - (2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 1 * u.m, " + "(2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 2 * u.m - (1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 3 * u.m]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({((((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 1) * 0.1) - (((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 2) * 0.1)), ((((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 3) * 0.1) - (((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 1) * 0.1)), ((((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 2) * 0.1) - (((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 3) * 0.1))})" + ) + + print("\n7 Using other variabels in Python mode\n") + b = UserVariable(name="b", value=math.cross([3, 2, 1] * u.m, solution.coordinate)) + a.value = math.cross(b, [3, 2, 
1] * u.m) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[(1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 1 * u.m - (3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 2 * u.m, " + "(3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]) * 3 * u.m - (2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 1 * u.m, " + "(2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1]) * 2 * u.m - (1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2]) * 3 * u.m]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({((((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 1) * 0.1) - (((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 2) * 0.1)), ((((((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0])) * 3) * 0.1) - (((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 1) * 0.1)), ((((((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])) * 2) * 0.1) - (((((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])) * 3) * 0.1))})" + ) + + print("\n8 Using other constant variables in Python mode\n") + b.value = [3, 2, 1] * u.m + a.value = math.cross(b, solution.coordinate) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1], " + "1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2], " + "3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({(((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])), (((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])), (((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0]))})" + ) + + print("\n9 Using non-unyt_array\n") + b.value = [3 * u.m, 2 * u.m, 1 * u.m] + a.value = math.cross(b, solution.coordinate) + res = a.value.evaluate(raise_on_non_evaluable=False, force_evaluate=False) + assert str(res) == ( + "[2 * u.m * solution.coordinate[2] - 1 * u.m * solution.coordinate[1], " + "1 * u.m * solution.coordinate[0] - 3 * u.m * solution.coordinate[2], " + "3 * u.m * solution.coordinate[1] - 2 * u.m * solution.coordinate[0]]" + ) + assert ( + a.value.to_solver_code(params) + == "std::vector({(((2 * 0.1) * coordinate[2]) - ((1 * 0.1) * coordinate[1])), (((1 * 0.1) * coordinate[0]) - ((3 * 0.1) * coordinate[2])), (((3 * 0.1) * coordinate[1]) - ((2 * 0.1) * coordinate[0]))})" + ) + + +def test_expression_indexing(): + a = UserVariable(name="a", value=1) + b = UserVariable(name="b", value=[1, 2, 3]) + c = UserVariable(name="c", value=[3, 2, 1]) + + # Cannot simplify with a non-statically evaluable index object (an expression, for example) + cross_result = math.cross(b, c) + expr = Expression.model_validate(cross_result[a]) + + assert ( + str(expr) + == "[b[1] * c[2] - b[2] * c[1], b[2] * c[0] - b[0] * c[2], b[0] * c[1] - b[1] * c[0]][a]" + ) + assert expr.evaluate() == 8 + + # Can simplify with a statically evaluable index object (an integer literal here) + expr = Expression.model_validate(cross_result[1]) + + assert str(expr) == "b[2] * c[0] - b[0] * c[2]" + assert expr.evaluate() == 8 + + +def test_to_file_from_file_expression( + constant_variable, constant_array, constant_unyt_quantity, constant_unyt_array +): + with SI_unit_system: + params = SimulationParams( + reference_geometry=ReferenceGeometry( + area=10 * u.m**2, + ), + outputs=[ +
VolumeOutput( + output_fields=[ + solution.mut.in_unit(new_name="mut_in_SI", new_unit="cm**2/min"), + constant_variable, + constant_array, + constant_unyt_quantity, + constant_unyt_array, + ] + ) + ], + ) + + to_file_from_file_test(params) + + +def assert_ignore_space(expected: str, actual: str): + """For expression comparison, ignore spaces""" + assert expected.replace(" ", "") == actual.replace(" ", "") + + +def test_udf_generator(): + with SI_unit_system: + params = SimulationParams( + operating_condition=LiquidOperatingCondition( + velocity_magnitude=5 * u.m / u.s, + ), + private_attribute_asset_cache=AssetCache(project_length_unit=10 * u.m), + ) + # Scalar output + result = user_variable_to_udf( + solution.mut.in_unit(new_name="mut_in_km", new_unit="km**2/s"), input_params=params + ) + # velocity scale = 100 m/s, length scale = 10 m, mut scale = 1000 m**2/s = 0.001 km**2/s + assert result.expression == "mut_in_km = (mut * 0.001);" + + # Vector output + result = user_variable_to_udf( + solution.velocity.in_unit(new_name="velocity_in_SI", new_unit="m/s"), input_params=params + ) + # velocity scale = 100 m/s, so each nondimensional component is rescaled by 100.0 + assert ( + result.expression + == "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;velocity_in_SI[0] = (___velocity[0] * 100.0); velocity_in_SI[1] = (___velocity[1] * 100.0); velocity_in_SI[2] = (___velocity[2] * 100.0);" + ) + + vel_cross_vec = UserVariable( + name="vel_cross_vec", value=math.cross(solution.velocity, [1, 2, 3] * u.cm) + ).in_unit(new_unit="m*km/s/s") + result = user_variable_to_udf(vel_cross_vec, input_params=params) + assert ( + result.expression + == "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;vel_cross_vec[0] = ((((___velocity[1] * 3) * 0.001) - ((___velocity[2] * 2) * 0.001)) * 10.0); vel_cross_vec[1] = ((((___velocity[2] * 1) * 0.001) - ((___velocity[0] * 3) * 0.001)) * 10.0); vel_cross_vec[2] = ((((___velocity[0] * 2) * 0.001) - ((___velocity[1] * 1) * 0.001)) * 10.0);" + ) + + vel_cross_vec = UserVariable( + name="vel_cross_vec", value=math.cross(solution.velocity, [1, 2, 3] * u.cm) + ).in_unit(new_unit="CGS_unit_system") + assert vel_cross_vec.value.get_output_units(input_params=params) == u.cm**2 / u.s + + +def test_project_variables_serialization(): + ccc = UserVariable(name="ccc", value=12 * u.m / u.s) + aaa = UserVariable( + name="aaa", value=[solution.velocity[0] + ccc, solution.velocity[1], solution.velocity[2]] + ) + bbb = UserVariable(name="bbb", value=[aaa[0] + 14 * u.m / u.s, aaa[1], aaa[2]]).in_unit( + new_unit="km/ms" + ) + + with SI_unit_system: + params = SimulationParams( + operating_condition=AerospaceCondition( + velocity_magnitude=10 * u.m / u.s, + reference_velocity_magnitude=10 * u.m / u.s, + ), + outputs=[ + VolumeOutput( + output_fields=[ + bbb, + ] + ), + ProbeOutput( + probe_points=[ + Point(name="pt1", location=(1, 2, 3), private_attribute_id="111") + ], + output_fields=[bbb], + ), + ], + ) + + params = save_user_variables(params) + + with open("ref/simulation_with_project_variables.json", "r+") as fh: + ref_data = fh.read() + + assert ref_data == params.model_dump_json(indent=4, exclude_none=True) + + +def test_project_variables_deserialization(): + with open("ref/simulation_with_project_variables.json", "r+") as fh: + data = json.load(fh) + + # Assert no variables registered yet +
with pytest.raises(NameError): + context.default_context.get("aaa") + with pytest.raises(NameError): + context.default_context.get("bbb") + with pytest.raises(NameError): + context.default_context.get("ccc") + + params, _, _ = validate_model( + params_as_dict=data, + root_item_type=None, + validated_by=ValidationCalledBy.LOCAL, + ) + assert params + assert ( + params.outputs[0].output_fields.items[0].value.expression + == "[aaa[0] + 14 * u.m / u.s, aaa[1], aaa[2]]" + ) + + assert params.outputs[0].output_fields.items[0].value.output_units == "km/ms" + + assert ( + params.outputs[0] + .output_fields.items[0] + .value.evaluate(force_evaluate=False, raise_on_non_evaluable=False) + .expression + == "[solution.velocity[0] + 12.0 * u.m / u.s + 14 * u.m / u.s, solution.velocity[1], solution.velocity[2]]" + ) # Fully resolvable + + +def test_overwriting_project_variables(): + UserVariable(name="a", value=1) + + with pytest.raises( + ValueError, + match="Redeclaring user variable a with new value: 2.0. Previous value: 1.0", + ): + UserVariable(name="a", value=2) + + +def test_whitelisted_callables(): + def get_user_variable_names(module): + return [attr for attr in dir(module) if isinstance(getattr(module, attr), SolverVariable)] + + solution_vars = get_user_variable_names(solution) + control_vars = get_user_variable_names(control) + + assert compare_lists(solution_vars, WHITELISTED_CALLABLES["flow360.solution"]["callables"]) + assert compare_lists(control_vars, WHITELISTED_CALLABLES["flow360.control"]["callables"]) diff --git a/tests/simulation/test_updater.py b/tests/simulation/test_updater.py index ffd022b94..e83f34f45 100644 --- a/tests/simulation/test_updater.py +++ b/tests/simulation/test_updater.py @@ -487,14 +487,14 @@ def test_updater_to_25_4_1(): assert params_new["meshing"]["defaults"]["geometry_accuracy"]["units"] == "m" -def test_updater_to_25_6_0(): +def test_updater_to_25_6_1(): with open("../data/simulation/simulation_pre_25_6_0.json", "r") as fp: params = json.load(fp) - def _update_to_25_6_0(pre_update_param_as_dict): + def _update_to_25_6_1(pre_update_param_as_dict): params_new = updater( version_from=f"25.5.1", - version_to=f"25.6.0", + version_to=f"25.6.1", params_as_dict=pre_update_param_as_dict, ) return params_new @@ -508,21 +508,21 @@ def _ensure_validity(params): assert params_new pre_update_param_as_dict = copy.deepcopy(params) - params_new = _update_to_25_6_0(pre_update_param_as_dict) + params_new = _update_to_25_6_1(pre_update_param_as_dict) assert params_new["models"][2]["velocity_direction"] == [0, -1, 0] assert "velocity_direction" not in params_new["models"][2]["spec"] _ensure_validity(params_new) pre_update_param_as_dict = copy.deepcopy(params) pre_update_param_as_dict["models"][2]["spec"]["velocity_direction"] = None - params_new = _update_to_25_6_0(pre_update_param_as_dict) + params_new = _update_to_25_6_1(pre_update_param_as_dict) assert "velocity_direction" not in params_new["models"][2] assert "velocity_direction" not in params_new["models"][2]["spec"] _ensure_validity(params_new) pre_update_param_as_dict = copy.deepcopy(params) pre_update_param_as_dict["models"][2]["spec"].pop("velocity_direction") - params_new = _update_to_25_6_0(pre_update_param_as_dict) + params_new = _update_to_25_6_1(pre_update_param_as_dict) assert "velocity_direction" not in params_new["models"][2] assert "velocity_direction" not in params_new["models"][2]["spec"] _ensure_validity(params_new) @@ -530,7 +530,7 @@ def _ensure_validity(params): pre_update_param_as_dict = 
copy.deepcopy(params) pre_update_param_as_dict["models"][2]["spec"].pop("velocity_direction") pre_update_param_as_dict["models"][2]["velocity_direction"] = [0, -1, 0] - params_new = _update_to_25_6_0(pre_update_param_as_dict) + params_new = _update_to_25_6_1(pre_update_param_as_dict) assert params_new["models"][2]["velocity_direction"] == [0, -1, 0] assert "velocity_direction" not in params_new["models"][2]["spec"] _ensure_validity(params_new) diff --git a/tests/simulation/translator/ref/Flow360_expression_udf.json b/tests/simulation/translator/ref/Flow360_expression_udf.json new file mode 100644 index 000000000..32d8355cc --- /dev/null +++ b/tests/simulation/translator/ref/Flow360_expression_udf.json @@ -0,0 +1,113 @@ +{ + "freestream": { + "alphaAngle": 0.0, + "betaAngle": 0.0, + "Mach": 0.8399999999999999, + "Temperature": 288.15, + "muRef": 4.292321046986499e-08 + }, + "timeStepping": { + "CFL": { + "type": "adaptive", + "min": 0.1, + "max": 10000.0, + "maxRelativeChange": 1.0, + "convergenceLimitingFactor": 0.25 + }, + "physicalSteps": 1, + "orderOfAccuracy": 2, + "maxPseudoSteps": 2000, + "timeStepSize": "inf" + }, + "navierStokesSolver": { + "absoluteTolerance": 1e-10, + "relativeTolerance": 0.0, + "orderOfAccuracy": 2, + "linearSolver": { + "maxIterations": 30 + }, + "CFLMultiplier": 1.0, + "kappaMUSCL": -1.0, + "numericalDissipationFactor": 1.0, + "limitVelocity": false, + "limitPressureDensity": false, + "lowMachPreconditioner": false, + "updateJacobianFrequency": 4, + "maxForceJacUpdatePhysicalSteps": 0, + "modelType": "Compressible", + "equationEvalFrequency": 1 + }, + "turbulenceModelSolver": { + "absoluteTolerance": 1e-08, + "relativeTolerance": 0.0, + "orderOfAccuracy": 2, + "linearSolver": { + "maxIterations": 20 + }, + "CFLMultiplier": 2.0, + "reconstructionGradientLimiter": 0.5, + "quadraticConstitutiveRelation": false, + "updateJacobianFrequency": 4, + "maxForceJacUpdatePhysicalSteps": 0, + "rotationCorrection": false, + "equationEvalFrequency": 4, + "modelType": "SpalartAllmaras", + "modelConstants": { + "C_DES": 0.72, + "C_d": 8.0, + "C_cb1": 0.1355, + "C_cb2": 0.622, + "C_sigma": 0.6666666666666666, + "C_v1": 7.1, + "C_vonKarman": 0.41, + "C_w2": 0.3, + "C_t3": 1.2, + "C_t4": 0.5, + "C_min_rd": 10.0 + }, + "DDES": false, + "ZDES": false, + "gridSizeForLES": "maxEdgeLength" + }, + "initialCondition": { + "type": "initialCondition", + "rho": "rho", + "u": "u", + "v": "v", + "w": "w", + "p": "p" + }, + "boundaries": {}, + "volumeOutput": { + "outputFields": [ + "Mach_SI", + "velocity_SI", + "uuu" + ], + "outputFormat": "paraview", + "computeTimeAverages": false, + "animationFrequency": -1, + "animationFrequencyOffset": 0, + "animationFrequencyTimeAverage": -1, + "animationFrequencyTimeAverageOffset": 0, + "startAverageIntegrationStep": -1 + }, + "userDefinedFields": [ + { + "name": "Mach_SI", + "expression": "double ___Mach;___Mach = usingLiquidAsMaterial ? 
0 : sqrt(primitiveVars[1] * primitiveVars[1] + primitiveVars[2] * primitiveVars[2] + primitiveVars[3] * primitiveVars[3]) / sqrt(1.4 * primitiveVars[4] / primitiveVars[0]);Mach_SI = (___Mach * 1);" + }, + { + "name": "velocity_SI", + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;velocity_SI[0] = (___velocity[0] * 340.2940058082124); velocity_SI[1] = (___velocity[1] * 340.2940058082124); velocity_SI[2] = (___velocity[2] * 340.2940058082124);" + }, + { + "name": "uuu", + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;uuu[0] = (___velocity[0] * 340.2940058082124); uuu[1] = (___velocity[1] * 340.2940058082124); uuu[2] = (___velocity[2] * 340.2940058082124);" + } + ], + "usingLiquidAsMaterial": false, + "outputRescale": { + "velocityScale": 1.0 + } +} \ No newline at end of file diff --git a/tests/simulation/translator/ref/Flow360_user_variable.json b/tests/simulation/translator/ref/Flow360_user_variable.json new file mode 100644 index 000000000..da8ecfe89 --- /dev/null +++ b/tests/simulation/translator/ref/Flow360_user_variable.json @@ -0,0 +1,132 @@ +{ + "boundaries": { + "fluid/body": { + "heatFlux": 0.0, + "roughnessHeight": 0.0, + "type": "NoSlipWall" + }, + "fluid/farfield": { + "type": "Freestream" + } + }, + "freestream": { + "Mach": 0.05, + "Temperature": -1, + "alphaAngle": 5.0, + "betaAngle": 2.0, + "muRef": 5.010000000000001e-09 + }, + "initialCondition": { + "p": "p", + "rho": "rho", + "type": "initialCondition", + "u": "u", + "v": "v", + "w": "w" + }, + "navierStokesSolver": { + "CFLMultiplier": 1.0, + "absoluteTolerance": 1e-10, + "equationEvalFrequency": 1, + "kappaMUSCL": -1.0, + "limitPressureDensity": false, + "limitVelocity": false, + "linearSolver": { + "maxIterations": 30 + }, + "lowMachPreconditioner": true, + "lowMachPreconditionerThreshold": 0.05, + "maxForceJacUpdatePhysicalSteps": 0, + "modelType": "Compressible", + "numericalDissipationFactor": 1.0, + "orderOfAccuracy": 2, + "relativeTolerance": 0.0, + "updateJacobianFrequency": 4 + }, + "outputRescale": { + "velocityScale": 20.0 + }, + "timeStepping": { + "CFL": { + "type": "adaptive", + "min": 0.1, + "max": 1000000.0, + "maxRelativeChange": 50.0, + "convergenceLimitingFactor": 1.0 + }, + "physicalSteps": 123, + "orderOfAccuracy": 2, + "maxPseudoSteps": 20, + "timeStepSize": 300.0 + }, + "turbulenceModelSolver": { + "CFLMultiplier": 2.0, + "DDES": false, + "ZDES": false, + "absoluteTolerance": 1e-08, + "equationEvalFrequency": 4, + "gridSizeForLES": "maxEdgeLength", + "linearSolver": { + "maxIterations": 20 + }, + "maxForceJacUpdatePhysicalSteps": 0, + "modelConstants": { + "C_DES": 0.72, + "C_cb1": 0.1355, + "C_cb2": 0.622, + "C_d": 8.0, + "C_min_rd": 10.0, + "C_sigma": 0.6666666666666666, + "C_t3": 1.2, + "C_t4": 0.5, + "C_v1": 7.1, + "C_vonKarman": 0.41, + "C_w2": 0.3 + }, + "modelType": "SpalartAllmaras", + "orderOfAccuracy": 2, + "quadraticConstitutiveRelation": false, + "reconstructionGradientLimiter": 0.5, + "relativeTolerance": 0.0, + "rotationCorrection": false, + "updateJacobianFrequency": 4 + }, + "userDefinedFields": [ + { + "expression": "double velocity[3];velocity[0] = primitiveVars[1];velocity[1] = primitiveVars[2];velocity[2] = primitiveVars[3];velocity_magnitude = magnitude(velocity) * velocityScale;", + "name": 
"velocity_magnitude" + }, + { + "expression": "double ___Mach;___Mach = usingLiquidAsMaterial ? 0 : sqrt(primitiveVars[1] * primitiveVars[1] + primitiveVars[2] * primitiveVars[2] + primitiveVars[3] * primitiveVars[3]) / sqrt(1.4 * primitiveVars[4] / primitiveVars[0]);Mach_SI = (___Mach * 1);", + "name": "Mach_SI" + }, + { + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;velocity_SI[0] = (___velocity[0] * 200.0); velocity_SI[1] = (___velocity[1] * 200.0); velocity_SI[2] = (___velocity[2] * 200.0);", + "name": "velocity_SI" + }, + { + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;uuu[0] = (___velocity[0] * 0.0002); uuu[1] = (___velocity[1] * 0.0002); uuu[2] = (___velocity[2] * 0.0002);", + "name": "uuu" + }, + { + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;my_var[0] = (((((2.0 * 1.0) / 200.0) * ___velocity[2]) - (((3.0 * 1.0) / 200.0) * ___velocity[1])) * 40000.0); my_var[1] = (((((3.0 * 1.0) / 200.0) * ___velocity[0]) - (((1.0 * 1.0) / 200.0) * ___velocity[2])) * 40000.0); my_var[2] = (((((1.0 * 1.0) / 200.0) * ___velocity[1]) - (((2.0 * 1.0) / 200.0) * ___velocity[0])) * 40000.0);", + "name": "my_var" + } + ], + "usingLiquidAsMaterial": true, + "volumeOutput": { + "animationFrequency": -1, + "animationFrequencyOffset": 0, + "animationFrequencyTimeAverage": -1, + "animationFrequencyTimeAverageOffset": 0, + "computeTimeAverages": false, + "outputFields": [ + "Mach_SI", + "velocity_SI", + "uuu", + "my_var" + ], + "outputFormat": "paraview", + "startAverageIntegrationStep": -1 + } +} \ No newline at end of file diff --git a/tests/simulation/translator/test_output_translation.py b/tests/simulation/translator/test_output_translation.py index 605607b0e..3918e6d8e 100644 --- a/tests/simulation/translator/test_output_translation.py +++ b/tests/simulation/translator/test_output_translation.py @@ -3,8 +3,11 @@ import pytest import flow360.component.simulation.units as u +from flow360.component.simulation.framework.updater_utils import compare_values +from flow360.component.simulation.models.material import Water from flow360.component.simulation.operating_condition.operating_condition import ( AerospaceCondition, + LiquidOperatingCondition, ) from flow360.component.simulation.outputs.output_entities import ( Point, @@ -39,16 +42,29 @@ translate_output, ) from flow360.component.simulation.unit_system import SI_unit_system +from flow360.component.simulation.user_code.variables import solution @pytest.fixture() -def volume_output_config(): +def vel_in_km_per_hr(): + return solution.velocity.in_unit(new_name="velocity_in_km_per_hr", new_unit=u.km / u.hr) + + +@pytest.fixture() +def volume_output_config(vel_in_km_per_hr): return ( VolumeOutput( frequency=1, frequency_offset=2, output_format="both", - output_fields=["primitiveVars", "betMetrics", "qcriterion", "velocity", "vorticity"], + output_fields=[ + "primitiveVars", + "betMetrics", + "qcriterion", + "velocity", + "vorticity", + vel_in_km_per_hr, + ], ), { "animationFrequency": 1, @@ -64,6 +80,7 @@ def volume_output_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ], "outputFormat": 
"paraview,tecplot", "startAverageIntegrationStep": -1, @@ -72,13 +89,20 @@ def volume_output_config(): @pytest.fixture() -def avg_volume_output_config(): +def avg_volume_output_config(vel_in_km_per_hr): return ( TimeAverageVolumeOutput( frequency=11, frequency_offset=12, output_format="both", - output_fields=["primitiveVars", "betMetrics", "qcriterion", "velocity", "vorticity"], + output_fields=[ + "primitiveVars", + "betMetrics", + "qcriterion", + "velocity", + "vorticity", + vel_in_km_per_hr, + ], start_step=1, ), { @@ -95,6 +119,7 @@ def avg_volume_output_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ], "outputFormat": "paraview,tecplot", "startAverageIntegrationStep": 1, @@ -103,7 +128,6 @@ def avg_volume_output_config(): def test_volume_output(volume_output_config, avg_volume_output_config): - import json ##:: volumeOutput only with SI_unit_system: @@ -145,21 +169,22 @@ def test_volume_output(volume_output_config, avg_volume_output_config): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ], "outputFormat": "paraview,tecplot", "startAverageIntegrationStep": 1, } } - assert sorted(ref["volumeOutput"].items()) == sorted(translated["volumeOutput"].items()) + assert compare_values(ref["volumeOutput"], translated["volumeOutput"]) @pytest.fixture() -def surface_output_config(): +def surface_output_config(vel_in_km_per_hr): return ( [ SurfaceOutput( # Local entities=[Surface(name="surface1"), Surface(name="surface2")], - output_fields=["Cp"], + output_fields=["Cp", vel_in_km_per_hr], output_format="tecplot", frequency=123, frequency_offset=321, @@ -168,7 +193,7 @@ def surface_output_config(): entities=[Surface(name="surface11"), Surface(name="surface22")], frequency=123, frequency_offset=321, - output_fields=["T", "velocity", "vorticity"], + output_fields=["T", "velocity", "vorticity", vel_in_km_per_hr], output_format="tecplot", ), ], @@ -182,7 +207,7 @@ def surface_output_config(): "outputFormat": "tecplot", "startAverageIntegrationStep": -1, "surfaces": { - "surface1": {"outputFields": ["Cp"]}, + "surface1": {"outputFields": ["Cp", "velocity_in_km_per_hr"]}, "surface11": { "outputFields": [ "T", @@ -190,9 +215,10 @@ def surface_output_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ] }, - "surface2": {"outputFields": ["Cp"]}, + "surface2": {"outputFields": ["Cp", "velocity_in_km_per_hr"]}, "surface22": { "outputFields": [ "T", @@ -200,6 +226,7 @@ def surface_output_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ] }, }, @@ -209,15 +236,15 @@ def surface_output_config(): @pytest.fixture() -def avg_surface_output_config(): +def avg_surface_output_config(vel_in_km_per_hr): return [ TimeAverageSurfaceOutput( # Local entities=[Surface(name="surface1"), Surface(name="surface2")], - output_fields=["Cp"], + output_fields=["Cp", vel_in_km_per_hr], ), TimeAverageSurfaceOutput( # Local entities=[Surface(name="surface3")], - output_fields=["T"], + output_fields=["T", vel_in_km_per_hr], ), ] @@ -251,7 +278,7 @@ def test_surface_output( "outputFormat": "paraview", "startAverageIntegrationStep": -1, "surfaces": { - "surface1": {"outputFields": ["Cp"]}, + "surface1": {"outputFields": ["Cp", "velocity_in_km_per_hr"]}, "surface11": { "outputFields": [ "T", @@ -259,9 +286,10 @@ def test_surface_output( "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ] }, - "surface2": {"outputFields": ["Cp"]}, + 
"surface2": {"outputFields": ["Cp", "velocity_in_km_per_hr"]}, "surface22": { "outputFields": [ "T", @@ -269,9 +297,10 @@ def test_surface_output( "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ] }, - "surface3": {"outputFields": ["T"]}, + "surface3": {"outputFields": ["T", "velocity_in_km_per_hr"]}, }, "writeSingleFile": False, } @@ -279,7 +308,7 @@ def test_surface_output( @pytest.fixture() -def sliceoutput_config(): +def sliceoutput_config(vel_in_km_per_hr): return ( [ SliceOutput( # Local @@ -295,7 +324,13 @@ def sliceoutput_config(): origin=(0.12, 0.13, 0.14) * u.m, ), ], - output_fields=["Cp", "velocity", "vorticity", "vorticityMagnitude"], + output_fields=[ + "Cp", + "velocity", + "vorticity", + "vorticityMagnitude", + vel_in_km_per_hr, + ], frequency=33, frequency_offset=22, output_format="tecplot", @@ -316,7 +351,7 @@ def sliceoutput_config(): frequency=33, frequency_offset=22, output_format="tecplot", - output_fields=["T", "primitiveVars"], + output_fields=["T", "primitiveVars", vel_in_km_per_hr], ), ], { @@ -330,12 +365,12 @@ def sliceoutput_config(): "outputFormat": "tecplot", "slices": { "slice01": { - "outputFields": ["T", "primitiveVars"], + "outputFields": ["T", "primitiveVars", "velocity_in_km_per_hr"], "sliceNormal": [1.0, 0.0, 0.0], "sliceOrigin": [10.02, 10.03, 10.04], }, "slice02": { - "outputFields": ["T", "primitiveVars"], + "outputFields": ["T", "primitiveVars", "velocity_in_km_per_hr"], "sliceNormal": [0.6, 0.0, 0.8], "sliceOrigin": [6.12, 6.13, 6.14], }, @@ -346,6 +381,7 @@ def sliceoutput_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ], "sliceNormal": [0.0, 1.0, 0.0], "sliceOrigin": [0.02, 0.03, 0.04], @@ -357,6 +393,7 @@ def sliceoutput_config(): "velocity_magnitude", "vorticity", "vorticityMagnitude", + "velocity_in_km_per_hr", ], "sliceNormal": [0.6, 0.8, 0.0], "sliceOrigin": [0.12, 0.13, 0.14], @@ -380,7 +417,7 @@ def test_slice_output( @pytest.fixture() -def isosurface_output_config(): +def isosurface_output_config(vel_in_km_per_hr): return ( [ IsosurfaceOutput( # Local @@ -406,7 +443,7 @@ def isosurface_output_config(): field="vorticity_z", ), ], - output_fields=["Cp"], + output_fields=["Cp", vel_in_km_per_hr], frequency=332, frequency_offset=222, output_format="paraview", @@ -427,7 +464,7 @@ def isosurface_output_config(): frequency=332, frequency_offset=222, output_format="paraview", - output_fields=["T", "primitiveVars"], + output_fields=["T", "primitiveVars", vel_in_km_per_hr], ), ], { @@ -435,32 +472,32 @@ def isosurface_output_config(): "animationFrequencyOffset": 222, "isoSurfaces": { "isosurface 01": { - "outputFields": ["T", "primitiveVars"], + "outputFields": ["T", "primitiveVars", "velocity_in_km_per_hr"], "surfaceField": "nuHat", "surfaceFieldMagnitude": 0.0001, }, "isosurface 02": { - "outputFields": ["T", "primitiveVars"], + "outputFields": ["T", "primitiveVars", "velocity_in_km_per_hr"], "surfaceField": "qcriterion", "surfaceFieldMagnitude": 10000.0, }, "isosurface 10": { - "outputFields": ["Cp"], + "outputFields": ["Cp", "velocity_in_km_per_hr"], "surfaceField": "T", "surfaceFieldMagnitude": 0.0001, }, "isosurface 14": { - "outputFields": ["Cp"], + "outputFields": ["Cp", "velocity_in_km_per_hr"], "surfaceField": "qcriterion", "surfaceFieldMagnitude": 20.431, }, "isosurface 15": { - "outputFields": ["Cp"], + "outputFields": ["Cp", "velocity_in_km_per_hr"], "surfaceField": "velocity_x", "surfaceFieldMagnitude": 0.1, }, "isosurface 16": { - "outputFields": 
["Cp"], + "outputFields": ["Cp", "velocity_in_km_per_hr"], "surfaceField": "vorticity_z", "surfaceFieldMagnitude": 0.2, }, @@ -486,7 +523,7 @@ def test_isosurface_output( @pytest.fixture() -def probe_output_config(): +def probe_output_config(vel_in_km_per_hr): return ( [ ProbeOutput( # Local @@ -501,7 +538,7 @@ def probe_output_config(): location=[0.0001, 0.02, 0.03] * u.m, ), ], - output_fields=["primitiveVars", "Cp"], + output_fields=["primitiveVars", "Cp", vel_in_km_per_hr], ), ProbeOutput( # Local name="prb 12", @@ -511,7 +548,7 @@ def probe_output_config(): location=[10, 10.02, 10.03] * u.cm, ), ], - output_fields=["primitiveVars", "Cp"], + output_fields=["primitiveVars", "Cp", vel_in_km_per_hr], ), TimeAverageProbeOutput( # Local name="prb average", @@ -521,7 +558,7 @@ def probe_output_config(): location=[10, 10.02, 10.03] * u.cm, ), ], - output_fields=["primitiveVars", "Cp", "T"], + output_fields=["primitiveVars", "Cp", "T", vel_in_km_per_hr], frequency=10, ), ], @@ -534,7 +571,7 @@ def probe_output_config(): "start": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "end": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "numberOfPoints": [1, 1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "type": "lineProbe", }, "prb 12": { @@ -544,7 +581,7 @@ def probe_output_config(): "start": [[10e-2, 10.02e-2, 10.03e-2]], "end": [[10e-2, 10.02e-2, 10.03e-2]], "numberOfPoints": [1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "type": "lineProbe", }, "prb average": { @@ -557,7 +594,7 @@ def probe_output_config(): "start": [[10e-2, 10.02e-2, 10.03e-2]], "end": [[10e-2, 10.02e-2, 10.03e-2]], "numberOfPoints": [1], - "outputFields": ["primitiveVars", "Cp", "T"], + "outputFields": ["primitiveVars", "Cp", "T", "velocity_in_km_per_hr"], "type": "lineProbe", }, }, @@ -567,7 +604,7 @@ def probe_output_config(): @pytest.fixture() -def probe_output_with_point_array(): +def probe_output_with_point_array(vel_in_km_per_hr): return ( [ ProbeOutput( @@ -586,7 +623,7 @@ def probe_output_with_point_array(): number_of_points=7, ), ], - output_fields=["primitiveVars", "Cp"], + output_fields=["primitiveVars", "Cp", vel_in_km_per_hr], ), ProbeOutput( name="prb point", @@ -600,7 +637,7 @@ def probe_output_with_point_array(): location=[0.0001, 0.02, 0.03] * u.m, ), ], - output_fields=["primitiveVars", "Cp"], + output_fields=["primitiveVars", "Cp", vel_in_km_per_hr], ), ProbeOutput( name="prb mix", @@ -616,7 +653,7 @@ def probe_output_with_point_array(): number_of_points=5, ), ], - output_fields=["primitiveVars", "Cp"], + output_fields=["primitiveVars", "Cp", vel_in_km_per_hr], ), ], { @@ -625,7 +662,7 @@ def probe_output_with_point_array(): "start": [[0.1, 0.2, 0.3], [0.1, 0.2, 0.3]], "end": [[1.1, 1.2, 1.3], [1.3, 1.5, 1.7]], "numberOfPoints": [5, 7], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, @@ -635,7 +672,7 @@ def probe_output_with_point_array(): "start": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "end": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "numberOfPoints": [1, 1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, @@ -645,7 +682,7 @@ def probe_output_with_point_array(): 
"start": [[0.1, 0.2, 0.3], [1e-2, 1.02e-2, 0.0003]], "end": [[1.1, 1.2, 1.3], [1e-2, 1.02e-2, 0.0003]], "numberOfPoints": [5, 1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, @@ -658,7 +695,7 @@ def probe_output_with_point_array(): @pytest.fixture() -def surface_integral_output_config(): +def surface_integral_output_config(vel_in_km_per_hr): return ( [ SurfaceIntegralOutput( # Local @@ -667,7 +704,7 @@ def surface_integral_output_config(): Surface(name="surface1", private_attribute_full_name="zoneName/surface1"), Surface(name="surface2"), ], - output_fields=["My_field_1"], + output_fields=["My_field_1", vel_in_km_per_hr], ), SurfaceIntegralOutput( name="prb 122", @@ -675,7 +712,7 @@ def surface_integral_output_config(): Surface(name="surface21"), Surface(name="surface22"), ], - output_fields=["My_field_2"], + output_fields=["My_field_2", vel_in_km_per_hr], ), # Local ], { @@ -684,7 +721,7 @@ def surface_integral_output_config(): "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["My_field_1"], + "outputFields": ["My_field_1", "velocity_in_km_per_hr"], "surfaces": ["zoneName/surface1", "surface2"], "type": "surfaceIntegral", }, @@ -692,7 +729,7 @@ def surface_integral_output_config(): "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["My_field_2"], + "outputFields": ["My_field_2", "velocity_in_km_per_hr"], "surfaces": ["surface21", "surface22"], "type": "surfaceIntegral", }, @@ -702,7 +739,7 @@ def surface_integral_output_config(): ) -def test_surface_probe_output(): +def test_surface_probe_output(vel_in_km_per_hr): param_with_ref = ( [ SurfaceProbeOutput( @@ -715,7 +752,7 @@ def test_surface_probe_output(): Surface(name="surface1", private_attribute_full_name="zoneA/surface1"), Surface(name="surface2", private_attribute_full_name="zoneA/surface2"), ], - output_fields=["Cp", "Cf"], + output_fields=["Cp", "Cf", vel_in_km_per_hr], ), TimeAverageSurfaceProbeOutput( name="SP-2", @@ -728,7 +765,7 @@ def test_surface_probe_output(): Surface(name="surface1", private_attribute_full_name="zoneB/surface1"), Surface(name="surface2", private_attribute_full_name="zoneB/surface2"), ], - output_fields=["Mach", "primitiveVars", "yPlus"], + output_fields=["Mach", "primitiveVars", "yPlus", vel_in_km_per_hr], ), SurfaceProbeOutput( name="SP-3", @@ -750,7 +787,7 @@ def test_surface_probe_output(): Surface(name="surface1", private_attribute_full_name="zoneC/surface1"), Surface(name="surface2", private_attribute_full_name="zoneC/surface2"), ], - output_fields=["Mach", "primitiveVars", "yPlus", "my_own_field"], + output_fields=["Mach", "primitiveVars", "yPlus", "my_own_field", vel_in_km_per_hr], ), ], { @@ -759,7 +796,7 @@ def test_surface_probe_output(): "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["Cp", "Cf"], + "outputFields": ["Cp", "Cf", "velocity_in_km_per_hr"], "surfacePatches": ["zoneA/surface1", "zoneA/surface2"], "start": [[1e-2, 1.02e-2, 0.0003], [2.0, 1.01, 0.03]], "end": [[1e-2, 1.02e-2, 0.0003], [2.0, 1.01, 0.03]], @@ -773,7 +810,7 @@ def test_surface_probe_output(): "animationFrequencyTimeAverageOffset": 0, "startAverageIntegrationStep": -1, "computeTimeAverages": True, - "outputFields": ["Mach", "primitiveVars", "yPlus"], + "outputFields": ["Mach", "primitiveVars", "yPlus", 
"velocity_in_km_per_hr"], "surfacePatches": ["zoneB/surface1", "zoneB/surface2"], "start": [ [1e-2, 1.02e-2, 0.0003], @@ -792,7 +829,13 @@ def test_surface_probe_output(): "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["Mach", "primitiveVars", "yPlus", "my_own_field"], + "outputFields": [ + "Mach", + "primitiveVars", + "yPlus", + "my_own_field", + "velocity_in_km_per_hr", + ], "surfacePatches": ["zoneC/surface1", "zoneC/surface2"], "start": [[0.1, 0.2, 0.3], [0.1, 0.2, 0.3]], "end": [[1.1, 1.2, 1.3], [1.3, 1.5, 1.7]], @@ -888,14 +931,14 @@ def test_monitor_output( "start": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "end": [[1e-2, 1.02e-2, 0.0003], [0.0001, 0.02, 0.03]], "numberOfPoints": [1, 1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "type": "lineProbe", }, "prb 110": { "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["My_field_1"], + "outputFields": ["My_field_1", "velocity_in_km_per_hr"], "surfaces": ["zoneName/surface1", "surface2"], "type": "surfaceIntegral", }, @@ -906,14 +949,14 @@ def test_monitor_output( "start": [[10e-2, 10.02e-2, 10.03e-2]], "end": [[10e-2, 10.02e-2, 10.03e-2]], "numberOfPoints": [1], - "outputFields": ["primitiveVars", "Cp"], + "outputFields": ["primitiveVars", "Cp", "velocity_in_km_per_hr"], "type": "lineProbe", }, "prb 122": { "animationFrequency": 1, "animationFrequencyOffset": 0, "computeTimeAverages": False, - "outputFields": ["My_field_2"], + "outputFields": ["My_field_2", "velocity_in_km_per_hr"], "surfaces": ["surface21", "surface22"], "type": "surfaceIntegral", }, @@ -927,7 +970,7 @@ def test_monitor_output( "start": [[10e-2, 10.02e-2, 10.03e-2]], "end": [[10e-2, 10.02e-2, 10.03e-2]], "numberOfPoints": [1], - "outputFields": ["primitiveVars", "Cp", "T"], + "outputFields": ["primitiveVars", "Cp", "T", "velocity_in_km_per_hr"], "type": "lineProbe", }, }, @@ -1015,7 +1058,7 @@ def test_acoustic_output(aeroacoustic_output_config, aeroacoustic_output_permeab ) -def test_surface_slice_output(): +def test_surface_slice_output(vel_in_km_per_hr): param_with_ref = ( [ SurfaceSliceOutput( @@ -1028,7 +1071,7 @@ def test_surface_slice_output(): Surface(name="surface1", private_attribute_full_name="zoneA/surface1"), Surface(name="surface2", private_attribute_full_name="zoneA/surface2"), ], - output_fields=["Cp", "Cf", "primitiveVars"], + output_fields=["Cp", "Cf", "primitiveVars", vel_in_km_per_hr], frequency=2, ), SurfaceSliceOutput( @@ -1042,7 +1085,7 @@ def test_surface_slice_output(): Surface(name="surface1", private_attribute_full_name="zoneB/surface1"), Surface(name="surface2", private_attribute_full_name="zoneB/surface2"), ], - output_fields=["Mach", "primitiveVars", "yPlus"], + output_fields=["Mach", "primitiveVars", "yPlus", vel_in_km_per_hr], ), ], { @@ -1055,7 +1098,7 @@ def test_surface_slice_output(): "name": "S1", "sliceOrigin": [0.01, 0.0102, 0.0003], "sliceNormal": [0.0, 1.0, 0.0], - "outputFields": ["Cp", "Cf", "primitiveVars"], + "outputFields": ["Cp", "Cf", "primitiveVars", "velocity_in_km_per_hr"], "surfacePatches": ["zoneA/surface1", "zoneA/surface2"], "animationFrequency": 1, "animationFrequencyOffset": 0, @@ -1065,7 +1108,7 @@ def test_surface_slice_output(): "name": "S3", "sliceOrigin": [0.01, 0.0101, 0.0003], "sliceNormal": [0.0, 1.0, 0.0], - "outputFields": ["Cp", "Cf", "primitiveVars"], + "outputFields": ["Cp", "Cf", "primitiveVars", 
"velocity_in_km_per_hr"], "surfacePatches": ["zoneA/surface1", "zoneA/surface2"], "animationFrequency": 1, "animationFrequencyOffset": 0, @@ -1075,7 +1118,7 @@ def test_surface_slice_output(): "name": "P1", "sliceOrigin": [0.01, 0.0102, 0.0003], "sliceNormal": [0.0, 0.0, 1.0], - "outputFields": ["Mach", "primitiveVars", "yPlus"], + "outputFields": ["Mach", "primitiveVars", "yPlus", "velocity_in_km_per_hr"], "surfacePatches": ["zoneB/surface1", "zoneB/surface2"], "animationFrequency": 1, "animationFrequencyOffset": 0, @@ -1085,7 +1128,7 @@ def test_surface_slice_output(): "name": "P2", "sliceOrigin": [2.0, 1.01, 0.03], "sliceNormal": [0.0, 0.0, -1.0], - "outputFields": ["Mach", "primitiveVars", "yPlus"], + "outputFields": ["Mach", "primitiveVars", "yPlus", "velocity_in_km_per_hr"], "surfacePatches": ["zoneB/surface1", "zoneB/surface2"], "animationFrequency": 1, "animationFrequencyOffset": 0, @@ -1095,7 +1138,7 @@ def test_surface_slice_output(): "name": "P3", "sliceOrigin": [3.0, 1.02, 0.03], "sliceNormal": [0.0, 0.0, 1.0], - "outputFields": ["Mach", "primitiveVars", "yPlus"], + "outputFields": ["Mach", "primitiveVars", "yPlus", "velocity_in_km_per_hr"], "surfacePatches": ["zoneB/surface1", "zoneB/surface2"], "animationFrequency": 1, "animationFrequencyOffset": 0, @@ -1111,17 +1154,21 @@ def test_surface_slice_output(): translated = {"boundaries": {}} translated = translate_output(param, translated) - print(json.dumps(translated, indent=4)) assert sorted(param_with_ref[1].items()) == sorted(translated["surfaceSliceOutput"].items()) -def test_dimensioned_output_fields_translation(): +def test_dimensioned_output_fields_translation(vel_in_km_per_hr): """Test the translation of output fields from user-facing fields to solver fields.""" with SI_unit_system: + water = Water( + name="h2o", density=1000 * u.kg / u.m**3, dynamic_viscosity=0.001 * u.kg / u.m / u.s + ) param = SimulationParams( - operating_condition=AerospaceCondition( - velocity_magnitude=100.0 * u.m / u.s, + operating_condition=LiquidOperatingCondition( + velocity_magnitude=50 * u.m / u.s, + reference_velocity_magnitude=100 * u.m / u.s, + material=water, ), outputs=[ VolumeOutput( @@ -1137,6 +1184,7 @@ def test_dimensioned_output_fields_translation(): "velocity_z_m_per_s", "pressure", "pressure_pa", + vel_in_km_per_hr, ], ), SurfaceOutput( @@ -1176,6 +1224,7 @@ def test_dimensioned_output_fields_translation(): "velocity_z_m_per_s", "pressure", "pressure_pa", + "velocity_in_km_per_hr", ] expected_fields_s = [ @@ -1190,89 +1239,65 @@ def test_dimensioned_output_fields_translation(): ref = { "userDefinedFields": [ - {"name": "pressure", "expression": "pressure = primitiveVars[4];"}, + {"name": "my_field", "expression": "1+1"}, { - "name": "velocity_m_per_s", - "expression": "double velocity[3];" - "velocity[0] = primitiveVars[1];" - "velocity[1] = primitiveVars[2];" - "velocity[2] = primitiveVars[3];" - "velocity_m_per_s[0] = velocity[0] * 340.29400580821283;" - "velocity_m_per_s[1] = velocity[1] * 340.29400580821283;" - "velocity_m_per_s[2] = velocity[2] * 340.29400580821283;", - }, - { - "name": "wall_shear_stress_magnitude", - "expression": "wall_shear_stress_magnitude = magnitude(wallShearStress);", + "name": "pressure", + "expression": "double gamma = 1.4;pressure = (usingLiquidAsMaterial) ? 
(primitiveVars[4] - 1.0 / gamma) * (velocityScale * velocityScale) : primitiveVars[4];", }, { - "name": "velocity_magnitude", - "expression": "double velocity[3]" - "velocity[0] = primitiveVars[1]" - "velocity[1] = primitiveVars[2]" - "velocity[2] = primitiveVars[3]" - "velocity_magnitude = magnitude(velocity)", + "name": "pressure_pa", + "expression": "double pressure;double gamma = 1.4;pressure = (usingLiquidAsMaterial) ? (primitiveVars[4] - 1.0 / gamma) * (velocityScale * velocityScale) : primitiveVars[4];pressure_pa = pressure * 999999999.9999999;", }, { "name": "velocity", - "expression": "velocity[0] = primitiveVars[1]" - "velocity[1] = primitiveVars[2]" - "velocity[2] = primitiveVars[3]", + "expression": "velocity[0] = primitiveVars[1] * velocityScale;velocity[1] = primitiveVars[2] * velocityScale;velocity[2] = primitiveVars[3] * velocityScale;", }, { - "name": "wall_shear_stress_magnitude_pa", - "expression": "double wall_shear_stress_magnitude" - "wall_shear_stress_magnitude = magnitude(wallShearStress)" - "wall_shear_stress_magnitude_pa = wall_shear_stress_magnitude * 141855.012726525", + "name": "velocity_in_km_per_hr", + "expression": "double ___velocity[3];___velocity[0] = primitiveVars[1] * velocityScale;___velocity[1] = primitiveVars[2] * velocityScale;___velocity[2] = primitiveVars[3] * velocityScale;velocity_in_km_per_hr[0] = (___velocity[0] * 3600.0); velocity_in_km_per_hr[1] = (___velocity[1] * 3600.0); velocity_in_km_per_hr[2] = (___velocity[2] * 3600.0);", }, { - "name": "velocity_y_m_per_s", - "expression": "double velocity_y" - "velocity_y = primitiveVars[2]" - "velocity_y_m_per_s = velocity_y * 340.29400580821283", + "name": "velocity_m_per_s", + "expression": "double velocity[3];velocity[0] = primitiveVars[1] * velocityScale;velocity[1] = primitiveVars[2] * velocityScale;velocity[2] = primitiveVars[3] * velocityScale;velocity_m_per_s[0] = velocity[0] * 1000.0;velocity_m_per_s[1] = velocity[1] * 1000.0;velocity_m_per_s[2] = velocity[2] * 1000.0;", }, { - "name": "velocity_x_m_per_s", - "expression": "double velocity_x" - "velocity_x = primitiveVars[1]" - "velocity_x_m_per_s = velocity_x * 340.29400580821283", + "name": "velocity_magnitude", + "expression": "double velocity[3];velocity[0] = primitiveVars[1];velocity[1] = primitiveVars[2];velocity[2] = primitiveVars[3];velocity_magnitude = magnitude(velocity) * velocityScale;", }, { "name": "velocity_magnitude_m_per_s", - "expression": "double velocity_magnitude" - "double velocity[3]" - "velocity[0] = primitiveVars[1]" - "velocity[1] = primitiveVars[2]" - "velocity[2] = primitiveVars[3]" - "velocity_magnitude = magnitude(velocity)" - "velocity_magnitude_m_per_s = velocity_magnitude * 340.29400580821283", + "expression": "double velocity_magnitude;double velocity[3];velocity[0] = primitiveVars[1];velocity[1] = primitiveVars[2];velocity[2] = primitiveVars[3];velocity_magnitude = magnitude(velocity) * velocityScale;velocity_magnitude_m_per_s = velocity_magnitude * 1000.0;", }, { - "name": "pressure_pa", - "expression": "double pressure" - "pressure = primitiveVars[4]" - "pressure_pa = pressure * 141855.012726525", + "name": "velocity_x_m_per_s", + "expression": "double velocity_x;velocity_x = primitiveVars[1] * velocityScale;velocity_x_m_per_s = velocity_x * 1000.0;", }, { - "name": "velocity_z_m_per_s", - "expression": "double velocity_z" - "velocity_z = primitiveVars[3]" - "velocity_z_m_per_s = velocity_z * 340.29400580821283", + "name": "velocity_y_m_per_s", + "expression": "double velocity_y;velocity_y = 
primitiveVars[2] * velocityScale;velocity_y_m_per_s = velocity_y * 1000.0;", }, { - "name": "my_field", - "expression": "1+1", + "name": "velocity_z_m_per_s", + "expression": "double velocity_z;velocity_z = primitiveVars[3] * velocityScale;velocity_z_m_per_s = velocity_z * 1000.0;", }, { - "expression": "vorticity_y = gradPrimitive[1][2] - gradPrimitive[3][0];", "name": "vorticity_y", + "expression": "vorticity_y = (gradPrimitive[1][2] - gradPrimitive[3][0]) * velocityScale;", + }, + { + "name": "wall_shear_stress_magnitude", + "expression": "wall_shear_stress_magnitude = magnitude(wallShearStress) * (velocityScale * velocityScale);", + }, + { + "name": "wall_shear_stress_magnitude_pa", + "expression": "double wall_shear_stress_magnitude;wall_shear_stress_magnitude = magnitude(wallShearStress) * (velocityScale * velocityScale);wall_shear_stress_magnitude_pa = wall_shear_stress_magnitude * 999999999.9999999;", }, ] } - solver_user_defined_fields = {} - solver_user_defined_fields["userDefinedFields"] = solver_json["userDefinedFields"] - assert sorted(solver_user_defined_fields) == sorted(ref) + translated_udfs = sorted(solver_json["userDefinedFields"], key=lambda x: x["name"]) + ref_udfs = sorted(ref["userDefinedFields"], key=lambda x: x["name"]) + assert compare_values(translated_udfs, ref_udfs) @pytest.fixture() diff --git a/tests/simulation/translator/test_solver_translator.py b/tests/simulation/translator/test_solver_translator.py index a99ee9254..3beeac723 100644 --- a/tests/simulation/translator/test_solver_translator.py +++ b/tests/simulation/translator/test_solver_translator.py @@ -48,10 +48,14 @@ VolumeOutput, ) from flow360.component.simulation.primitives import ReferenceGeometry, Surface +from flow360.component.simulation.services import ValidationCalledBy, validate_model from flow360.component.simulation.simulation_params import SimulationParams from flow360.component.simulation.time_stepping.time_stepping import RampCFL, Steady from flow360.component.simulation.translator.solver_translator import get_solver_json from flow360.component.simulation.unit_system import SI_unit_system +from flow360.component.simulation.user_code.core.types import UserVariable +from flow360.component.simulation.user_code.functions import math +from flow360.component.simulation.user_code.variables import solution from tests.simulation.translator.utils.actuator_disk_param_generator import ( actuator_disk_create_param, ) @@ -501,6 +505,24 @@ def test_user_defined_field(): ) translate_and_compare(param, mesh_unit=1 * u.m, ref_json_file="Flow360_udf.json") + with SI_unit_system: + param = SimulationParams( + operating_condition=AerospaceCondition.from_mach( + mach=0.84, + ), + outputs=[ + VolumeOutput( + name="output", + output_fields=[ + solution.Mach, + solution.velocity, + UserVariable(name="uuu", value=solution.velocity), + ], + ) + ], + ) + translate_and_compare(param, mesh_unit=1 * u.m, ref_json_file="Flow360_expression_udf.json") + def test_boundaries(): operating_condition = AerospaceCondition.from_mach( @@ -618,3 +640,61 @@ def test_liquid_simulation_translation(): # Flow360 time to seconds = 1m/(200m/s) = 0.005 s # t_seconds = (0.005 s * t) translate_and_compare(param, mesh_unit=1 * u.m, ref_json_file="Flow360_liquid_rotation_dd.json") + + +import flow360.component.simulation.user_code.core.context as context + + +@pytest.fixture() +def reset_context(): + context.default_context._values = { + name: item for (name, item) in context.default_context._values.items() if "." 
in name + } + + +def test_param_with_user_variables(): + some_dependent_variable_a = UserVariable( + name="some_dependent_variable_a", value=[1.0 * u.m / u.s, 2.0 * u.m / u.s, 3.0 * u.m / u.s] + ) + my_var = UserVariable( + name="my_var", value=math.cross(some_dependent_variable_a, solution.velocity) + ) + my_time_stepping_var = UserVariable(name="my_time_stepping_var", value=1.0 * u.s) + with SI_unit_system: + param = SimulationParams( + operating_condition=LiquidOperatingCondition( + velocity_magnitude=10 * u.m / u.s, + alpha=5 * u.deg, + beta=2 * u.deg, + material=Water(name="my_water", density=1.000 * 10**3 * u.kg / u.m**3), + ), + models=[ + Wall(entities=Surface(name="fluid/body")), + Freestream(entities=Surface(name="fluid/farfield")), + ], + outputs=[ + VolumeOutput( + name="output", + output_fields=[ + solution.Mach, + solution.velocity, + UserVariable(name="uuu", value=solution.velocity).in_unit(new_unit="km/ms"), + my_var, + ], + ) + ], + time_stepping=Unsteady(step_size=my_time_stepping_var + 0.5 * u.s, steps=123), + ) + # Mimicking real workflow where the Param is serialized and then deserialized + params_validated, _, _ = validate_model( + params_as_dict=param.model_dump(mode="json"), + validated_by=ValidationCalledBy.LOCAL, + root_item_type=None, + ) + + assert params_validated + translate_and_compare( + params_validated, + mesh_unit=1 * u.m, + ref_json_file="Flow360_user_variable.json", + ) diff --git a/tests/test_current_flow360_version.py b/tests/test_current_flow360_version.py index 38d6c0368..9b8e536ad 100644 --- a/tests/test_current_flow360_version.py +++ b/tests/test_current_flow360_version.py @@ -2,4 +2,4 @@ def test_version(): - assert __version__ == "25.6.0b1" + assert __version__ == "25.6.1b1"