Skip to content

Hotfixes-24.11 #500 #518 and #487 #516

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Oct 28, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions examples/show_storage.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
# Minimal example: report per-folder storage usage for the current account.
import flow360 as fl

# Select the pre-production environment before making any API calls.
# NOTE(review): examples typically target the default environment — confirm
# preprod is intended here rather than left over from testing.
fl.Env.preprod.active()


# Walk the folder tree starting at the workspace root and log every item's
# storage size plus per-folder totals.
fl.Folder.print_storage()
1 change: 1 addition & 0 deletions flow360/component/flow360_params/flow360_output.py
Original file line number Diff line number Diff line change
Expand Up @@ -664,6 +664,7 @@ def update_model(self) -> Flow360BaseModel:
"outputFields": fields,
"startAverageIntegrationStep": self.start_average_integration_step,
"surfaces": self.surfaces,
"writeSingleFile": self.write_single_file,
}

return SurfaceOutput.parse_obj(model)
Expand Down
170 changes: 169 additions & 1 deletion flow360/component/folder.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,9 @@
from ..log import log
from .interfaces import FolderInterface
from .resource_base import AssetMetaBaseModel, Flow360Resource, ResourceDraft
from .utils import shared_account_confirm_proceed, validate_type
from .utils import shared_account_confirm_proceed, storage_size_formatter, validate_type

ROOT_FOLDER = "ROOT.FLOW360"


# pylint: disable=E0213
Expand Down Expand Up @@ -163,6 +165,172 @@ def create(cls, name: str, tags: List[str] = None, parent_folder: Folder = None)
)
return new_folder

def get_items(self):
    """
    Fetch all items within the current folder, handling pagination if needed.

    Returns
    -------
    list
        A list of all items found in the folder, sorted by storage size in
        descending order (sorting is requested from the API).
    """

    all_records = []
    page = 0
    size = 1000  # page size requested from the API
    # Optimistic guess so the loop runs at least once; corrected from the
    # "total" field of the first response.
    total_record_count = size

    # Loop until all pages are fetched
    while len(all_records) < total_record_count:
        payload = {
            "page": page,
            "size": size,
            "filterFolderIds": self.id,
            "filterExcludeSubfolders": True,
            "sortFields": ["storageSize"],
            "sortDirections": ["desc"],
            "expandFields": ["contentInfo"],
        }

        data = RestApi("/v2/items").get(params=payload)
        records = data.get("records", [])
        if not records:
            # Defensive stop: if the API reports a larger "total" than the
            # records it actually returns, an empty page would otherwise
            # make this loop spin forever.
            break
        all_records.extend(records)
        total_record_count = data.get("total", 0)
        page += 1

    return all_records

def _build_folder_tree(self, folders):
    """
    Build a hierarchical folder tree starting from the current folder.

    Parameters
    ----------
    folders : list
        A list of folder records as returned by the folders endpoint. The
        records are not modified (the previous implementation injected
        "subfolders" keys into the caller's dicts).

    Returns
    -------
    dict or None
        A dictionary with "name", "id" and nested "subfolders" entries,
        rooted at the current folder, or None if the current folder id is
        not present in the records.
    """

    # Index names by folder id without mutating the caller's records.
    names = {folder["id"]: folder["name"] for folder in folders}
    # The workspace root never appears in the records; register it explicitly.
    names[ROOT_FOLDER] = "My workspace"

    # Map each known folder id to the ids of its direct children.
    children = {folder_id: [] for folder_id in names}
    for folder in folders:
        parent_id = folder.get("parentFolderId")
        # Orphan records (unknown or missing parent) are skipped, matching
        # the previous behavior.
        if parent_id in children:
            children[parent_id].append(folder["id"])

    def build_hierarchy(folder_id):
        if folder_id not in names:
            return None

        return {
            "name": names[folder_id],
            "id": folder_id,
            "subfolders": [build_hierarchy(child_id) for child_id in children[folder_id]],
        }

    return build_hierarchy(self.id)

def get_folder_tree(self):
    """
    Retrieve the folder tree including subfolders from the API.

    Returns
    -------
    dict
        A hierarchical representation of the folder tree starting from the
        current folder.
    """

    # Single request; assumes the account holds no more than 1000 folders.
    query = {
        "includeSubfolders": True,
        "page": 0,
        "size": 1000,
    }
    response = RestApi("/v2/folders").get(params=query)
    return self._build_folder_tree(response["records"])

def _print_storage(self, tree, indent: int, n_display: int):
    """
    Recursively print the folder tree along with its contents and total storage usage.

    Parameters
    ----------
    tree : dict
        The current folder tree to display.
    indent : int
        The indentation level for pretty-printing.
    n_display : int
        The number of items to display before summarizing the remaining items.

    Returns
    -------
    int
        The total storage size of the current folder and its subfolders.
    """

    log.info(" " * indent + f"- [FOLDER] {tree['name']}")
    total_storage = 0
    for subfolder in tree["subfolders"]:
        # pylint: disable=protected-access
        total_storage += Folder(subfolder["id"])._print_storage(
            subfolder, indent + 1, n_display
        )

    # Folder-type items are excluded up front: their storage is already
    # accumulated through the recursive calls above, so counting them here
    # would double-count, and they should not consume display slots.
    # (Previously, folders past the first n_display items WERE summed into
    # the "remaining items" total — that was the double-count bug.)
    items = [item for item in self.get_items() if item["type"] != "Folder"]
    displayed_items = items[:n_display]
    remaining_items = items[n_display:]

    for item in displayed_items:
        storage_size = item.get("storageSize", 0)
        total_storage += storage_size
        log.info(
            " " * (indent + 1)
            + f"- [{item['type']}] {item['name']} (Size: {storage_size_formatter(storage_size)})"
        )

    if len(remaining_items) > 0:
        total_remaining_size = sum(item.get("storageSize", 0) for item in remaining_items)
        log.info(
            " " * (indent + 1)
            + f"+{len(remaining_items)} more (total {storage_size_formatter(total_remaining_size)})"
        )
        total_storage += total_remaining_size

    log.info(" " * (indent + 1) + f"Total Storage: {storage_size_formatter(total_storage)}")
    return total_storage

@classmethod
def print_storage(cls, folder_id: str = "ROOT.FLOW360", n_display: int = 10) -> None:
    """
    Display the storage details of a folder, including subfolders and a summary of all items.

    Parameters
    ----------
    folder_id : str, optional
        The ID of the folder to print storage details for. Defaults to "ROOT.FLOW360".
    n_display : int, optional
        The number of items to display before summarizing the remaining items. Defaults to 10.
    """
    root = cls(id=folder_id)
    root._print_storage(root.get_folder_tree(), 0, n_display)


# FOLDER LIST uses different endpoint, requires separate implementation

Expand Down
6 changes: 3 additions & 3 deletions flow360/component/simulation/models/volume_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ class ActuatorDisk(Flow360BaseModel):
Note that `center`, `axis_thrust`, `thickness` can be acquired from `entity` so they are not required anymore.
"""

entities: Optional[EntityList[Cylinder]] = pd.Field(None, alias="volumes")
entities: EntityList[Cylinder] = pd.Field(alias="volumes")
force_per_area: ForcePerArea = pd.Field()
name: Optional[str] = pd.Field(None)
type: Literal["ActuatorDisk"] = pd.Field("ActuatorDisk", frozen=True)
Expand Down Expand Up @@ -236,7 +236,7 @@ class BETDisk(Flow360BaseModel):

name: Optional[str] = pd.Field(None)
type: Literal["BETDisk"] = pd.Field("BETDisk", frozen=True)
entities: Optional[EntityList[Cylinder]] = pd.Field(None, alias="volumes")
entities: EntityList[Cylinder] = pd.Field(alias="volumes")

rotation_direction_rule: Literal["leftHand", "rightHand"] = pd.Field("rightHand")
number_of_blades: pd.StrictInt = pd.Field(gt=0, le=10)
Expand Down Expand Up @@ -332,7 +332,7 @@ class PorousMedium(Flow360BaseModel):

name: Optional[str] = pd.Field(None)
type: Literal["PorousMedium"] = pd.Field("PorousMedium", frozen=True)
entities: Optional[EntityList[GenericVolume, Box]] = pd.Field(None, alias="volumes")
entities: EntityList[GenericVolume, Box] = pd.Field(alias="volumes")

darcy_coefficient: InverseAreaType.Point = pd.Field()
forchheimer_coefficient: InverseLengthType.Point = pd.Field()
Expand Down
2 changes: 1 addition & 1 deletion flow360/component/simulation/outputs/outputs.py
Original file line number Diff line number Diff line change
Expand Up @@ -187,7 +187,7 @@ class SurfaceProbeOutput(Flow360BaseModel):
"""

name: str = pd.Field()
entities: EntityList[Point, PointArray] = pd.Field(None, alias="probe_points")
entities: EntityList[Point, PointArray] = pd.Field(alias="probe_points")
# Maybe add preprocess for this and by default add all Surfaces?
target_surfaces: EntityList[Surface] = pd.Field()

Expand Down
24 changes: 24 additions & 0 deletions flow360/component/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ def _valid_resource_id(resource_id) -> bool:
pattern = re.compile(
r"""
^ # Start of the string
ROOT\.FLOW360| # accept root folder
(?P<content> # Start of the content group
[0-9a-zA-Z,-]{16,} # Content: at least 16 characters, alphanumeric, comma, or dash
)$ # End of the string
Expand Down Expand Up @@ -529,3 +530,26 @@ def is_valid_surface_mesh(self):
# pylint: disable=missing-function-docstring
def is_valid_volume_mesh(self):
    # Volume meshes are accepted only in UGRID or CGNS format.
    return self.format in (MeshFileFormat.UGRID, MeshFileFormat.CGNS)


def storage_size_formatter(size_in_bytes):
    """
    Format the size in bytes into a human-readable format (B, kB, MB, GB).

    Parameters
    ----------
    size_in_bytes : int
        The size in bytes to be formatted.

    Returns
    -------
    str
        A string representing the size in the most appropriate unit
        (B, kB, MB, GB), with two decimal places above the byte range.
    """
    if size_in_bytes < 1024:
        # Byte counts are reported exactly, without decimals.
        return f"{size_in_bytes} B"
    # Pick the first unit whose upper bound the value fits under.
    for unit, upper_bound in (("kB", 1024**2), ("MB", 1024**3)):
        if size_in_bytes < upper_bound:
            return f"{size_in_bytes / (upper_bound // 1024):.2f} {unit}"
    return f"{size_in_bytes / 1024**3:.2f} GB"
3 changes: 2 additions & 1 deletion tests/data/cases/case_5.json
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,8 @@
"Cf": true,
"CfVec": true,
"yPlus": true,
"wallDistance": false
"wallDistance": false,
"writeSingleFile": true
},
"navierStokesSolver": {
"tolerance": 1e-10,
Expand Down
2 changes: 2 additions & 0 deletions tests/test_updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -153,6 +153,8 @@ def test_updater_from_files():
assert params.initial_condition == ExpressionInitialCondition(
constants={"gamma": "1.4"}, rho=1, u="x", v=0, w=0, p="1/gamma"
)
assert params.surface_output.write_single_file is True

params = fl.Flow360Params("data/cases/case_7.json")
assert params.turbulence_model_solver.reconstruction_gradient_limiter == 1.0
assert params.turbulence_model_solver.model_constants is not None
Expand Down
Loading