
Fix pylint for ALL simulation files. #330

Merged
2 changes: 1 addition & 1 deletion .github/workflows/codestyle.yml
@@ -58,4 +58,4 @@ jobs:
- name: Install dependencies
run: poetry install
- name: Run pylint
- run: poetry run pylint flow360 --rcfile .pylintrc
+ run: poetry run pylint $(git ls-files "flow360/*.py") --rcfile .pylintrc
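Instead of pointing pylint at the importable package ("flow360"), the workflow now passes every tracked Python file under flow360/ explicitly; in a git pathspec the "*" wildcard also crosses directory separators, so this recurses into the simulation subpackages. A rough Python equivalent of the new file selection (a sanity-check sketch, not part of the PR):

    import subprocess

    # Enumerate every tracked .py file under flow360/, as the workflow now does.
    files = subprocess.run(
        ["git", "ls-files", "flow360/*.py"],
        capture_output=True, text=True, check=True,
    ).stdout.splitlines()
    print(len(files), "files will be passed to pylint")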
2 changes: 1 addition & 1 deletion .pylintrc
@@ -46,7 +46,7 @@ ignore=tests, flow360/examples

# Add files or directories matching the regex patterns to the ignore-list. The
# regex matches against paths and can be in Posix or Windows format.
- ignore-paths=tests
+ ignore-paths=tests, flow360/examples, flow360/component/simulation/user_story_examples, flow360/component/results

# Files or directories matching the regex patterns are skipped. The regex
# matches against base names, not paths. The default value ignores Emacs file
37 changes: 25 additions & 12 deletions flow360/component/simulation/framework/base_model.py
@@ -1,9 +1,11 @@
"""Flow360BaseModel class definition."""

from __future__ import annotations

import hashlib
import json
from copy import deepcopy
- from typing import Any, List, Literal
+ from typing import Any, List

import pydantic as pd
import rich
@@ -23,6 +25,23 @@


def custom_to_camel(string: str) -> str:
"""
Convert a snake_case string to camelCase.

This function takes a snake_case string as input and converts it to camelCase.
It splits the input string by underscores, capitalizes the first letter of
each subsequent component (after the first one), and joins them together.

Parameters:
string (str): The input string in snake_case format.

Returns:
str: The converted string in camelCase format.

Example:
>>> custom_to_camel("example_snake_case")
'exampleSnakeCase'
"""
components = string.split("_")

camel_case_string = components[0]
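The rest of the body is collapsed in this diff view. For reference, a minimal standalone sketch consistent with the docstring above (the join loop is an assumption, not necessarily the PR's exact code):

    def custom_to_camel(string: str) -> str:
        """Convert a snake_case string to camelCase (sketch)."""
        components = string.split("_")
        camel_case_string = components[0]
        # Capitalize the first letter of each component after the first.
        for component in components[1:]:
            camel_case_string += component[0].upper() + component[1:]
        return camel_case_string

    assert custom_to_camel("example_snake_case") == "exampleSnakeCase"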
@@ -81,15 +100,6 @@ def __pydantic_init_subclass__(cls, **kwargs) -> None:
super().__pydantic_init_subclass__(**kwargs) # Correct use of super
cls._generate_docstring()

"""Sets config for all :class:`Flow360BaseModel` objects.

Custom Configuration Options
---------------------
require_one_of: Optional[List[str]] = []
conflicting_fields: Optional[List[Conflicts]] = []
include_hash: bool = False
include_defaults_in_schema: bool = True
"""
model_config = ConfigDict(
##:: Pydantic kwargs
arbitrary_types_allowed=True, # ?
@@ -117,6 +127,7 @@ def __setattr__(self, name, value):
super().__setattr__(name, value)

@pd.model_validator(mode="before")
+ @classmethod
def one_of(cls, values):
"""
root validator for require one of
@@ -133,6 +144,7 @@ def one_of(cls, values):
return values

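The @classmethod added above reflects pydantic v2's convention that mode="before" model validators receive the class rather than an instance. A self-contained sketch of the same pattern (the field names here are hypothetical, not flow360's):

    import pydantic as pd

    class Example(pd.BaseModel):
        alpha: int = 0
        beta: int = 0

        @pd.model_validator(mode="before")
        @classmethod
        def one_of(cls, values: dict) -> dict:
            # Sketch of a require-one-of rule: at least one field must be supplied.
            if not any(key in values for key in ("alpha", "beta")):
                raise ValueError("Provide at least one of: alpha, beta")
            return values

    Example(alpha=1)  # passes; Example() would raise a ValidationError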
# pylint: disable=no-self-argument
+ # pylint: disable=duplicate-code
@pd.model_validator(mode="before")
def handle_conflicting_fields(cls, values):
"""
@@ -429,14 +441,14 @@ def append(self, params: Flow360BaseModel, overwrite: bool = False):
"""
additional_config = params.model_dump(exclude_unset=True, exclude_none=True)
for key, value in additional_config.items():
- if self.__getattribute__(key) and not overwrite:
+ if getattr(self, key) and not overwrite:
log.warning(
f'"{key}" already exist in the original model, skipping. Use overwrite=True to overwrite values.'
)
continue
is_frozen = self.model_fields[key].frozen
if is_frozen is None or is_frozen is False:
- self.__setattr__(key, value)
+ setattr(self, key, value)

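Both replacements in append() swap explicit dunder calls for the builtin accessors, which is what pylint's unnecessary-dunder-call check (C2801) asks for. In isolation:

    class Probe:
        x = 1

    p = Probe()
    # The builtins are equivalent to, and preferred over, direct dunder calls:
    assert getattr(p, "x") == p.__getattribute__("x")
    setattr(p, "x", 2)  # rather than p.__setattr__("x", 2)
    assert p.x == 2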
@classmethod
def _generate_docstring(cls) -> str:
@@ -506,6 +518,7 @@ def _generate_docstring(cls) -> str:
doc += "\n"
cls.__doc__ = doc

+ # pylint: disable=too-many-arguments
def _convert_dimensions_to_solver(
self,
params,
9 changes: 8 additions & 1 deletion flow360/component/simulation/framework/cached_model_base.py
@@ -1,3 +1,5 @@
""" Base class for cached models. """

import abc
import inspect
from functools import wraps
@@ -8,7 +10,10 @@
from flow360.component.simulation.framework.base_model import Flow360BaseModel


+ # pylint: disable=missing-function-docstring
class CachedModelBase(Flow360BaseModel, metaclass=abc.ABCMeta):
+ """Base class for cached models."""

@classmethod
def model_constructor(cls, func: Callable) -> Callable:
@classmethod
@@ -21,9 +26,11 @@ def wrapper(cls, *args, **kwargs):
for k, v in sig.parameters.items()
if v.default is not inspect.Parameter.empty
}
+ # pylint: disable=protected-access
result._cached = result.__annotations__["_cached"](
**{**result._cached.model_dump(), **defaults, **kwargs}
)
+ # pylint: disable=protected-access
result._cached.constructor = func.__name__
return result

@@ -35,7 +42,7 @@ def __init__(self, **data):
if cached:
try:
self._cached = self.__annotations__["_cached"].model_validate(cached)
- except:
+ except pd.ValidationError:
pass
else:
defaults = {name: field.default for name, field in self.model_fields.items()}
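Narrowing the bare "except:" to pd.ValidationError means only invalid cached data is silently ignored, while genuine programming errors keep propagating. A self-contained sketch of the behavior (the Cached model below is hypothetical):

    import pydantic as pd

    class Cached(pd.BaseModel):
        constructor: str = "default"

    try:
        Cached.model_validate({"constructor": object()})  # not coercible to str
    except pd.ValidationError:
        pass  # stale or malformed cache entries are simply skipped
    # A bare "except:" would also have swallowed NameError, KeyboardInterrupt, etc.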
94 changes: 56 additions & 38 deletions flow360/component/simulation/framework/entity_base.py
@@ -1,3 +1,5 @@
"""Base classes for entity types."""

from __future__ import annotations

import copy
@@ -12,7 +14,7 @@


class MergeConflictError(Exception):
- pass
+ """Raised when a merge conflict is detected."""


class EntityBase(Flow360BaseModel, metaclass=ABCMeta):
@@ -24,7 +26,8 @@ class EntityBase(Flow360BaseModel, metaclass=ABCMeta):
A string representing the specific type of the entity.
This should be set in subclasses to differentiate between entity types.
Warning:
- This controls the granularity of the registry and must be unique for each entity type and it is **strongly recommended NOT** to change it as it will bring up compatability problems.
+ This controls the granularity of the registry and must be unique for each entity type
+ and it is **strongly recommended NOT** to change it as it will bring up compatibility problems.

name (str):
The name of the entity instance, used for identification and retrieval.
@@ -64,11 +67,13 @@ def copy(self, update=None, **kwargs) -> EntityBase:
"""
if update is None:
raise ValueError(
"Change is necessary when calling .copy() as there cannot be two identical entities at the same time. Please use update parameter to change the entity attributes."
"Change is necessary when calling .copy() as there cannot be two identical entities at the same time. "
"Please use update parameter to change the entity attributes."
)
if "name" not in update or update["name"] == self.name:
raise ValueError(
"Copying an entity requires a new name to be specified. Please provide a new name in the update dictionary."
"Copying an entity requires a new name to be specified. "
"Please provide a new name in the update dictionary."
)
return super().copy(update=update, **kwargs)

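The wrapped error messages above enforce that every copied entity gets a fresh name. The rule is easy to see with a standalone stand-in (MiniEntity is illustrative only, not part of flow360):

    import pydantic as pd

    class MiniEntity(pd.BaseModel):
        """Stand-in for EntityBase, for illustration only."""
        name: str

        def copy(self, update=None, **kwargs) -> "MiniEntity":
            if update is None or "name" not in update or update["name"] == self.name:
                raise ValueError("Copying an entity requires a new, different name.")
            return self.model_copy(update=update, **kwargs)

    wing = MiniEntity(name="wing")
    wing_2 = wing.copy(update={"name": "wing_2"})  # OK
    # wing.copy(update={"name": "wing"}) would raise ValueError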
@@ -82,14 +87,17 @@ def __eq__(self, other):

@property
def entity_bucket(self) -> str:
"""returns the bucket to which the entity belongs."""
return self.private_attribute_registry_bucket_name

@entity_bucket.setter
def entity_bucket(self, value: str):
"""disallow modification of the bucket to which the entity belongs."""
raise AttributeError("Cannot modify the bucket to which the entity belongs.")

@property
def entity_type(self) -> str:
"""returns the entity class name."""
return self.private_attribute_entity_type_name

@entity_type.setter
@@ -131,7 +139,7 @@ def __combine_bools(input_data):
if isinstance(input_data, bool):
return input_data
# If the input is a numpy ndarray, flatten it
- elif isinstance(input_data, np.ndarray):
+ if isinstance(input_data, np.ndarray):
input_data = input_data.ravel()
# If the input is not a boolean or an ndarray, assume it's an iterable of booleans
return all(input_data)
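Several hunks below also replace "== False" / "== True" with "is False" / "is True", satisfying pylint's singleton-comparison check. The two are not interchangeable in general, since "==" goes through __eq__ coercion while "is" tests identity:

    value = 1
    print(value == True)  # True: __eq__ coercion makes 1 compare equal to True
    print(value is True)  # False: 1 is not the True singleton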
@@ -151,15 +159,18 @@ def _merge_objects(obj_old: EntityBase, obj_new: EntityBase) -> EntityBase:
"Make sure merge is intended as the names of two entities are different."
)

- if obj_new._is_generic() == False and obj_old._is_generic() == True:
+ # pylint: disable=protected-access
+ if obj_new._is_generic() is False and obj_old._is_generic() is True:
# swap so that obj_old is **non-generic** and obj_new is **generic**
obj_new, obj_old = obj_old, obj_new

# Check the two objects are mergeable
- if obj_new._is_generic() == False and obj_old._is_generic() == False:
+ # pylint: disable=protected-access
+ if obj_new._is_generic() is False and obj_old._is_generic() is False:
if obj_new.__class__ != obj_old.__class__:
raise MergeConflictError(
f"Cannot merge objects of different class: {obj_old.__class__.__name__} and {obj_new.__class__.__name__}"
f"Cannot merge objects of different class: {obj_old.__class__.__name__} "
f"and {obj_new.__class__.__name__}"
)

for attr, value in obj_new.__dict__.items():
@@ -207,8 +218,10 @@ def _remove_duplicate_entities(expanded_entities: List[EntityBase]):
for name, entity_list in all_entities.items():
if len(entity_list) > 1:
# step 1: find one instance that is non-generic if any
+ base_index = 0
for base_index, entity in enumerate(entity_list):
- if entity._is_generic() == False:
+ # pylint: disable=protected-access
+ if entity._is_generic() is False:
break
for index, entity in enumerate(entity_list):
if index == base_index:
@@ -271,18 +284,19 @@ def _get_valid_entity_types(cls):
raise TypeError("Internal error, the metaclass for EntityList is not properly set.")

@classmethod
- def _valid_individual_input(cls, input):
+ def _valid_individual_input(cls, input_data):
"""Validate each individual element in a list or as standalone entity."""
- if isinstance(input, str) or isinstance(input, EntityBase):
- return input
- else:
- raise ValueError(
- f"Type({type(input)}) of input to `entities` ({input}) is not valid. Expected str or entity instance."
- )
+ if isinstance(input_data, (str, EntityBase)):
+ return input_data
+
+ raise ValueError(
+ f"Type({type(input_data)}) of input to `entities` ({input_data}) is not valid. "
+ "Expected str or entity instance."
+ )

@pd.model_validator(mode="before")
@classmethod
- def _format_input_to_list(cls, input: Union[dict, list]):
+ def _format_input_to_list(cls, input_data: Union[dict, list]):
"""
Flatten List[EntityBase] and put into stored_entities.
"""
@@ -293,12 +307,12 @@ def _format_input_to_list(cls, input: Union[dict, list]):
# 3. EntityBase comes from direct specification of entity in the list.
formated_input = []
valid_types = cls._get_valid_entity_types()
- if isinstance(input, list):
- if input == []:
+ if isinstance(input_data, list):
+ if input_data == []:
raise ValueError("Invalid input type to `entities`, list is empty.")
- for item in input:
+ for item in input_data:
if isinstance(item, list): # Nested list comes from assets
- [cls._valid_individual_input(individual) for individual in item]
+ _ = [cls._valid_individual_input(individual) for individual in item]
formated_input.extend(
[
individual
@@ -310,16 +324,17 @@
cls._valid_individual_input(item)
if isinstance(item, tuple(valid_types)):
formated_input.append(item)
- elif isinstance(input, dict):
- return dict(stored_entities=input["stored_entities"])
+ elif isinstance(input_data, dict):
+ return {"stored_entities": input_data["stored_entities"]}
+ # pylint: disable=no-else-return
else: # Single reference to an entity
- if input is None:
- return dict(stored_entities=None)
+ if input_data is None:
+ return {"stored_entities": None}
else:
- cls._valid_individual_input(input)
- if isinstance(input, tuple(valid_types)):
- formated_input.append(input)
- return dict(stored_entities=formated_input)
+ cls._valid_individual_input(input_data)
+ if isinstance(input_data, tuple(valid_types)):
+ formated_input.append(input_data)
+ return {"stored_entities": formated_input}

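Taken together, the branches above normalize four input shapes into one storage form. A standalone sketch of the rules (validation elided; not the PR's exact code):

    from typing import Union

    def format_input_to_list_sketch(input_data: Union[dict, list, str, None]) -> dict:
        """Normalize entity input into the {"stored_entities": ...} shape."""
        if isinstance(input_data, list):
            if not input_data:
                raise ValueError("Invalid input type to `entities`, list is empty.")
            flattened = []
            for item in input_data:
                if isinstance(item, list):  # nested lists come from assets
                    flattened.extend(item)
                else:
                    flattened.append(item)
            return {"stored_entities": flattened}
        if isinstance(input_data, dict):
            return {"stored_entities": input_data["stored_entities"]}
        if input_data is None:
            return {"stored_entities": None}
        return {"stored_entities": [input_data]}  # single entity or name pattern

    assert format_input_to_list_sketch(["wing", ["slat_1", "slat_2"]]) == {
        "stored_entities": ["wing", "slat_1", "slat_2"]
    }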
@pd.field_validator("stored_entities", mode="after")
@classmethod
@@ -364,20 +379,21 @@ def _get_expanded_entities(
if entities is None:
return None

+ # pylint: disable=protected-access
valid_types = self.__class__._get_valid_entity_types()

expanded_entities = []

+ # pylint: disable=not-an-iterable
for entity in entities:
if isinstance(entity, str):
# Expand from supplied registry
if supplied_registry is None:
- if expect_supplied_registry == False:
+ if expect_supplied_registry is False:
continue
- else:
- raise ValueError(
- f"Internal error, registry is not supplied for entity ({entity}) expansion."
- )
+ raise ValueError(
+ f"Internal error, registry is not supplied for entity ({entity}) expansion."
+ )
# Expand based on naming pattern registered in the Registry
pattern_matched_entities = supplied_registry.find_by_naming_pattern(entity)
# Filter pattern matched entities by valid types
@@ -398,12 +414,14 @@ def _get_expanded_entities(
raise ValueError(
f"Failed to find any matching entity with {entities}. Please check the input to entities."
)
- # TODO: As suggested by Runda. We better prompt user what entities are actually used/expanded to avoid user input error. We need a switch to turn it on or off.
- if create_hard_copy == True:
+ # pylint: disable=fixme
+ # TODO: As suggested by Runda. We better prompt user what entities are actually used/expanded to
+ # TODO: avoid user input error. We need a switch to turn it on or off.
+ if create_hard_copy is True:
return copy.deepcopy(expanded_entities)
- else:
- return expanded_entities
+ return expanded_entities

+ # pylint: disable=arguments-differ
def preprocess(self, supplied_registry=None, **kwargs):
"""
Expand and overwrite self.stored_entities in preparation for submission/serialization.
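For context on the string-expansion branch of _get_expanded_entities: find_by_naming_pattern matches registered entity names against a user-supplied pattern. A guess at those semantics using shell-style wildcards (the real registry implementation may differ):

    import fnmatch

    registered_names = ["wing", "slat_1", "slat_2", "fuselage"]

    def find_by_naming_pattern(pattern: str) -> list:
        """Shell-style wildcard match over registered entity names (sketch)."""
        return [name for name in registered_names if fnmatch.fnmatch(name, pattern)]

    assert find_by_naming_pattern("slat_*") == ["slat_1", "slat_2"]
    assert find_by_naming_pattern("wing") == ["wing"]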