From 539f5e433a1cd34d8f3139513f0d23fde6113895 Mon Sep 17 00:00:00 2001 From: Brian Staber Date: Thu, 7 Aug 2025 09:59:04 +0200 Subject: [PATCH 01/13] :recycle: Refactoring Sample container --- .../containers/sample_container/__init__.py | 1 + .../sample_container/field_manager.py | 54 +++++++++++++++++++ .../sample_container/scalar_manager.py | 26 +++++++++ 3 files changed, 81 insertions(+) create mode 100644 src/plaid/containers/sample_container/__init__.py create mode 100644 src/plaid/containers/sample_container/field_manager.py create mode 100644 src/plaid/containers/sample_container/scalar_manager.py diff --git a/src/plaid/containers/sample_container/__init__.py b/src/plaid/containers/sample_container/__init__.py new file mode 100644 index 00000000..8a11c4d5 --- /dev/null +++ b/src/plaid/containers/sample_container/__init__.py @@ -0,0 +1 @@ +"""Implements the `Sample` container and features managers.""" diff --git a/src/plaid/containers/sample_container/field_manager.py b/src/plaid/containers/sample_container/field_manager.py new file mode 100644 index 00000000..bbee7753 --- /dev/null +++ b/src/plaid/containers/sample_container/field_manager.py @@ -0,0 +1,54 @@ +"""Module that implements the `FieldManager` class that holds the responsibility of managing fields within a Sample.""" + +from plaid.types import CGNSTree, FieldType + + +class FieldManager: + """Manager object for fields.""" + + def __init__(self): + self.features: dict[str, FieldType] = {} + + def add_field( + self, + name: str, + field: FieldType, + zone_name: str = None, + base_name: str = None, + location: str = "Vertex", + time: float = None, + ) -> None: + """Add field.""" + pass + + def remove_field( + self, + name: str, + zone_name: str = None, + base_name: str = None, + location: str = "Vertex", + time: float = None, + ) -> CGNSTree: + """Remove field.""" + pass + + def get_field( + self, + name: str, + zone_name: str = None, + base_name: str = None, + location: str = "Vertex", + time: float = None, + ) -> FieldType: + """Get field.""" + pass + + def get_field_names( + self, + zone_name: str = None, + base_name: str = None, + location: str = "Vertex", + time: float = None, + ) -> set[str]: + """Get all field names.""" + pass diff --git a/src/plaid/containers/sample_container/scalar_manager.py b/src/plaid/containers/sample_container/scalar_manager.py new file mode 100644 index 00000000..fa758370 --- /dev/null +++ b/src/plaid/containers/sample_container/scalar_manager.py @@ -0,0 +1,26 @@ +"""Module that implements the `ScalarManager` class that holds the responsibility of managing scalars within a Sample.""" + +from plaid.types import ScalarType + + +class ScalarManager: + """Manager object for scalars.""" + + def __init__(self): + self.features: dict[str, ScalarType] = {} + + def add(self, name: str, value: ScalarType) -> None: + """Add a scalar.""" + self.features[name] = value + + def remove(self, name: str) -> ScalarType: + """Remove a scalar.""" + return self.features.pop(name) + + def get(self, name: str) -> ScalarType: + """Get scalar.""" + return self.features[name] + + def get_names(self) -> set[str]: + """Get all the scalars names.""" + return sorted(self.features.keys()) From 9205bb6adb200e15ed43448c412f56d3a21e7e6a Mon Sep 17 00:00:00 2001 From: Brian Staber Date: Fri, 22 Aug 2025 16:55:16 +0200 Subject: [PATCH 02/13] WIP --- src/plaid/containers/sample.py | 30 +++++++------------ .../sample_container/scalar_manager.py | 2 +- 2 files changed, 12 insertions(+), 20 deletions(-) diff --git
a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py index 739262d2..9e952ef0 100644 --- a/src/plaid/containers/sample.py +++ b/src/plaid/containers/sample.py @@ -60,11 +60,6 @@ level=logging.INFO, ) -# %% Globals - - -# %% Classes - def _check_names(names: Union[str, list[str]]): """Check that names do not contain invalid character ``/``. @@ -84,7 +79,7 @@ def _check_names(names: Union[str, list[str]]): ) -def read_index(pyTree: list, dim: list[int]): +def _read_index(pyTree: list, dim: list[int]): """Read Index Array or Index Range from CGNS. Args: @@ -94,12 +89,12 @@ def read_index(pyTree: list, dim: list[int]): Returns: indices """ - a = read_index_array(pyTree) - b = read_index_range(pyTree, dim) + a = _read_index_array(pyTree) + b = _read_index_range(pyTree, dim) return np.hstack((a, b)) -def read_index_array(pyTree: list): +def _read_index_array(pyTree: list): """Read Index Array from CGNS. Args: @@ -119,7 +114,7 @@ def read_index_array(pyTree: list): return np.array(res, dtype=int).ravel() -def read_index_range(pyTree: list, dim: list[int]): +def _read_index_range(pyTree: list, dim: list[int]): """Read Index Range from CGNS. Args: @@ -163,12 +158,9 @@ class Sample(BaseModel): def __init__( self, - directory_path: Union[str, Path] = None, + directory_path: Optional[Union[str, Path]] = None, mesh_base_name: str = "Base", mesh_zone_name: str = "Zone", - meshes: dict[float, CGNSTree] = None, - scalars: dict[str, ScalarType] = None, - time_series: dict[str, TimeSeriesType] = None, links: dict[float, list[LinkType]] = None, paths: dict[float, list[PathType]] = None, ) -> None: @@ -207,9 +199,9 @@ def __init__( self._mesh_base_name: str = mesh_base_name self._mesh_zone_name: str = mesh_zone_name - self._meshes: dict[float, CGNSTree] = meshes - self._scalars: dict[str, ScalarType] = scalars - self._time_series: dict[str, TimeSeriesType] = time_series + self._meshes: dict[float, CGNSTree] = {} + self._scalars: dict[str, ScalarType] = {} + self._time_series: dict[str, TimeSeriesType] = {} self._links: dict[float, list[LinkType]] = links self._paths: dict[float, list[PathType]] = paths @@ -1319,7 +1311,7 @@ def get_nodal_tags( for BCPath in BCPaths: BCNode = CGU.getNodeByPath(zone_node, BCPath) BCName = BCNode[0] - indices = read_index(BCNode, dim) + indices = _read_index(BCNode, dim) if len(indices) == 0: # pragma: no cover continue @@ -1340,7 +1332,7 @@ def get_nodal_tags( # if fnpath: # fn = CGU.getNodeByPath(ZSRNode, fnpath[0]) # familyName = CGU.getValueAsString(fn) - indices = read_index(ZSRNode, dim) + indices = _read_index(ZSRNode, dim) if len(indices) == 0: continue gl = CGU.getPathsByTypeSet(ZSRNode, ["GridLocation_t"])[0] diff --git a/src/plaid/containers/sample_container/scalar_manager.py b/src/plaid/containers/sample_container/scalar_manager.py index fa758370..e0d46ae5 100644 --- a/src/plaid/containers/sample_container/scalar_manager.py +++ b/src/plaid/containers/sample_container/scalar_manager.py @@ -3,7 +3,7 @@ from plaid.types import ScalarType -class ScalarManager: +class ScalarCollection: """Manager object for scalars.""" def __init__(self): From f42bf467e034ed4d39895bcb3ca8d0e1e402c831 Mon Sep 17 00:00:00 2001 From: Brian Staber Date: Tue, 26 Aug 2025 17:20:54 +0200 Subject: [PATCH 03/13] Defining collections --- src/plaid/containers/collections.py | 582 ++++++++++++++++++ src/plaid/containers/sample.py | 4 +- .../containers/sample_container/__init__.py | 1 - .../sample_container/field_manager.py | 54 -- .../sample_container/scalar_manager.py | 26 - 5 files 
changed, 584 insertions(+), 83 deletions(-) create mode 100644 src/plaid/containers/collections.py delete mode 100644 src/plaid/containers/sample_container/__init__.py delete mode 100644 src/plaid/containers/sample_container/field_manager.py delete mode 100644 src/plaid/containers/sample_container/scalar_manager.py diff --git a/src/plaid/containers/collections.py b/src/plaid/containers/collections.py new file mode 100644 index 00000000..51df3f63 --- /dev/null +++ b/src/plaid/containers/collections.py @@ -0,0 +1,582 @@ +"""Module for implementing collections.""" + +import logging +from typing import Optional, Union + +import CGNS.PAT.cgnskeywords as CGK +import CGNS.PAT.cgnslib as CGL +import CGNS.PAT.cgnsutils as CGU +import numpy as np + +from plaid.types import CGNSNode, CGNSTree, Field, Scalar +from plaid.utils import cgns_helper as CGH + +logger = logging.getLogger(__name__) +logging.basicConfig( + format="[%(asctime)s:%(levelname)s:%(filename)s:%(funcName)s(%(lineno)d)]:%(message)s", + level=logging.INFO, +) + + +def _check_names(names: Union[str, list[str]]): + """Check that names do not contain invalid character ``/``. + + Args: + names (Union[str, list[str]]): The names to check. + + Raises: + ValueError: If any name contains the invalid character ``/``. + """ + if isinstance(names, str): + names = [names] + for name in names: + if (name is not None) and ("/" in name): + raise ValueError( + f"feature_names containing `/` are not allowed, but {name=}, you should first replace any occurrence of `/` with something else, for example: `name.replace('/','__')`" + ) + + +class ScalarCollection: + """Manager object for scalars.""" + + def __init__(self): + self._scalars: Optional[dict[str, Scalar]] = None + + def add(self, name: str, value: Scalar) -> None: + """Add a scalar value to the collection. + + Args: + name (str): The name of the scalar value. + value (Scalar): The scalar value to add or update in the collection. + """ + _check_names([name]) + if self._scalars is None: + self._scalars = {name: value} + else: + self._scalars[name] = value + + def remove(self, name: str) -> Scalar: + """Delete a scalar value from the collection. + + Args: + name (str): The name of the scalar value to be deleted. + + Raises: + KeyError: Raised when the collection is empty or when there is no scalar with the provided name. + + Returns: + Scalar: The value of the deleted scalar. + """ + if self._scalars is None: + raise KeyError("There is no scalar inside this sample.") + + if name not in self._scalars: + raise KeyError(f"There is no scalar value with name {name}.") + + return self._scalars.pop(name) + + def get(self, name: str) -> Optional[Scalar]: + """Retrieve a scalar value associated with the given name. + + Args: + name (str): The name of the scalar value to retrieve. + + Returns: + Scalar or None: The scalar value associated with the given name, or None if the name is not found. + """ + if (self._scalars is None) or (name not in self._scalars): + return None + else: + return self._scalars[name] + + def get_names(self) -> list[str]: + """Get a sorted list of scalar names available in the object. + + Returns: + list[str]: A sorted list containing the names of the available scalars.
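+ + Example: A minimal usage sketch (the scalar name shown is illustrative): + >>> scalars = ScalarCollection() + >>> scalars.add("Mach", 0.3) + >>> scalars.get_names() + ['Mach']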
+ """ + if self._scalars is None: + return [] + else: + res = sorted(self._scalars.keys()) + return res + + +class FieldCollection: + """Manager object for fields.""" + + def __init__(self): + self._meshes: Optional[dict[float, CGNSTree]] = None + self._links: Optional[dict] = None + self._paths: Optional[dict] = None + self._defaults: dict = { + "active_base": None, + "active_zone": None, + "active_time": None, + } + + def add( + self, + name: str, + field: Field, + zone_name: Optional[str] = None, + base_name: Optional[str] = None, + location: str = "Vertex", + time: Optional[float] = None, + warning_overwrite: bool = True, + ) -> None: + """Add a field to a specified zone in the grid. + + Args: + name (str): The name of the field to be added. + field (Field): The field data to be added. + zone_name (str, optional): The name of the zone where the field will be added. Defaults to None. + base_name (str, optional): The name of the base where the zone is located. Defaults to None. + location (str, optional): The grid location where the field will be stored. Defaults to 'Vertex'. + Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` + time (float, optional): The time associated with the field. Defaults to None, in which case the default time is used. + warning_overwrite (bool, optional): Show a warning if a preexisting field is being overwritten. Defaults to True. + + Raises: + KeyError: Raised if the specified zone does not exist in the given base. + """ + _check_names([name]) + # init_tree will look for default time + self.init_tree(time) + + # get_zone will look for default zone_name, base_name and time + zone_node = self.get_zone(zone_name, base_name, time) + + if zone_node is None: + raise KeyError( + f"there is no Zone with name {zone_name} in base {base_name}. Did you check topological and physical dimensions?" + ) + + # solution_paths = CGU.getPathsByTypeOrNameList(self._tree, '/.*/.*/FlowSolution_t') + solution_paths = CGU.getPathsByTypeSet(zone_node, "FlowSolution_t") + has_FlowSolution_with_location = False + if len(solution_paths) > 0: + for s_path in solution_paths: + val_location = ( + CGU.getValueByPath(zone_node, f"{s_path}/GridLocation") + .tobytes() + .decode() + ) + if val_location == location: + has_FlowSolution_with_location = True + + if not has_FlowSolution_with_location: + CGL.newFlowSolution(zone_node, f"{location}Fields", gridlocation=location) + + solution_paths = CGU.getPathsByTypeSet(zone_node, "FlowSolution_t") + assert len(solution_paths) > 0 + + for s_path in solution_paths: + val_location = ( + CGU.getValueByPath(zone_node, f"{s_path}/GridLocation") + .tobytes() + .decode() + ) + + if val_location != location: + continue + + field_node = CGU.getNodeByPath(zone_node, f"{s_path}/{name}") + + if field_node is None: + flow_solution_node = CGU.getNodeByPath(zone_node, s_path) + CGL.newDataArray(flow_solution_node, name, np.asfortranarray(field)) + else: + if warning_overwrite: + logger.warning( + f"field node with name {name} already exists -> data will be replaced" + ) + CGU.setValue(field_node, np.asfortranarray(field)) + + def remove( + self, + name: str, + zone_name: Optional[str] = None, + base_name: Optional[str] = None, + location: str = "Vertex", + time: Optional[float] = None, + ) -> CGNSTree: + """Delete a field from a specified zone in the grid. + + Args: + name (str): The name of the field to be deleted. + zone_name (str, optional): The name of the zone from which the field will be deleted. Defaults to None. + base_name (str, optional): The name of the base where the zone is located. Defaults to None. + location (str, optional): The grid location where the field is stored.
Defaults to 'Vertex'. + Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` + time (float, optional): The time associated with the field. Defaults to None, in which case the default time is used. + + Raises: + KeyError: Raised if the specified zone or field does not exist in the given base. + + Returns: + CGNSTree: The tree at the provided time (without the deleted node) + """ + # get_zone will look for default zone_name, base_name, and time + zone_node = self.get_zone(zone_name, base_name, time) + time = self.get_time_assignment(time) + mesh_tree = self._meshes[time] + + if zone_node is None: + raise KeyError( + f"There is no Zone with name {zone_name} in base {base_name}." + ) + + solution_paths = CGU.getPathsByTypeSet(zone_node, [CGK.FlowSolution_t]) + + updated_tree = None + for s_path in solution_paths: + if ( + CGU.getValueByPath(zone_node, f"{s_path}/GridLocation") + .tobytes() + .decode() + == location + ): + field_node = CGU.getNodeByPath(zone_node, f"{s_path}/{name}") + if field_node is not None: + updated_tree = CGU.nodeDelete(mesh_tree, field_node) + + # If updated_tree is still None, no matching field node was found + if updated_tree is None: + raise KeyError(f"There is no field with name {name} in the specified zone.") + + return updated_tree + + def get( + self, + name: str, + zone_name: Optional[str] = None, + base_name: Optional[str] = None, + location: str = "Vertex", + time: Optional[float] = None, + ) -> Optional[Field]: + """Retrieve a field with a specified name from a given zone, base, location, and time. + + Args: + name (str): The name of the field to retrieve. + zone_name (str, optional): The name of the zone to search for. Defaults to None. + base_name (str, optional): The name of the base to search for. Defaults to None. + location (str, optional): The location at which to retrieve the field. Defaults to 'Vertex'. + Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` + time (float, optional): The time value to consider when searching for the field. If a specific time is not provided, the default time step is used. + + Returns: + Field or None: The concatenated field values found at the specified location, or None if the field is not found. + """ + # get_zone will look for default time + search_node = self.get_zone(zone_name, base_name, time) + if search_node is None: + return None + + is_empty = True + full_field = [] + + solution_paths = CGU.getPathsByTypeSet(search_node, [CGK.FlowSolution_t]) + + for f_path in solution_paths: + if ( + CGU.getValueByPath(search_node, f_path + "/GridLocation") + .tobytes() + .decode() + == location + ): + field = CGU.getValueByPath(search_node, f_path + "/" + name) + + if field is None: + field = np.empty((0,)) + else: + is_empty = False + full_field.append(field) + + if is_empty: + return None + else: + return np.concatenate(full_field) + + def get_names( + self, + zone_name: Optional[str] = None, + base_name: Optional[str] = None, + location: str = "Vertex", + time: Optional[float] = None, + ) -> list[str]: + """Get a sorted list of field names associated with a specified zone, base, location, and time. + + Args: + zone_name (str, optional): The name of the zone to search for. Defaults to None. + base_name (str, optional): The name of the base to search for. Defaults to None. + location (str, optional): The desired grid location where the field is defined. Defaults to 'Vertex'. + Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` + time (float, optional): The specific time at which to retrieve field names.
If a specific time is not provided, the default time step is used. + + Returns: + list[str]: A sorted list of the unique field names that match the specified criteria. + """ + + def get_field_names_one_base(base_name: str) -> list[str]: + # get_zone will look for default zone_name, base_name, time + search_node = self.get_zone(zone_name, base_name, time) + if search_node is None: # pragma: no cover + return [] + + names = [] + solution_paths = CGU.getPathsByTypeSet(search_node, [CGK.FlowSolution_t]) + for f_path in solution_paths: + if ( + CGU.getValueByPath(search_node, f_path + "/GridLocation") + .tobytes() + .decode() + != location + ): + continue + f_node = CGU.getNodeByPath(search_node, f_path) + for path in CGU.getPathByTypeFilter(f_node, CGK.DataArray_t): + field_name = path.split("/")[-1] + if field_name != "GridLocation": + names.append(field_name) + return names + + if base_name is None: + # get_base_names will look for default time + base_names = self.get_base_names(time=time) + else: + base_names = [base_name] + + all_names = [] + for bn in base_names: + all_names += get_field_names_one_base(bn) + + # deduplicate first, then sort, so the result is a sorted list of unique names + all_names = sorted(set(all_names)) + + return all_names + + def init_tree(self, time: float = None) -> CGNSTree: + """Initialize a CGNS tree structure at a specified time step or create a new one if it doesn't exist. + + Args: + time (float, optional): The time step for which to initialize the CGNS tree structure. If a specific time is not provided, the default time step is used. + + Returns: + CGNSTree: The initialized or existing CGNS tree structure for the specified time step. + """ + time = self.get_time_assignment(time) + + if self._meshes is None: + self._meshes = {time: CGL.newCGNSTree()} + self._links = {time: None} + self._paths = {time: None} + elif time not in self._meshes: + self._meshes[time] = CGL.newCGNSTree() + self._links[time] = None + self._paths[time] = None + + return self._meshes[time] + + def get_zone( + self, zone_name: str = None, base_name: str = None, time: float = None + ) -> CGNSNode: + """Retrieve a CGNS Zone node by its name within a specific Base and time. + + Args: + zone_name (str, optional): The name of the Zone node to retrieve. If not specified, checks that there is **at most** one zone in the base, else raises an error. Defaults to None. + base_name (str, optional): The Base in which to seek the Zone to retrieve. If not specified, checks that there is **at most** one base, else raises an error. Defaults to None. + time (float, optional): Time at which you want to retrieve the Zone node. + + Returns: + CGNSNode: Returns a CGNS Zone node if found; otherwise, returns None. + """ + # get_base will look for default base_name and time + base_node = self.get_base(base_name, time) + if base_node is None: + logger.warning(f"No base with name {base_name} in this tree") + return None + + # get_zone_assignment will look for default base_name + zone_name = self.get_zone_assignment(zone_name, base_name, time) + if zone_name is None: + logger.warning(f"No zone with name {zone_name} in this base ({base_name})") + return None + + return CGU.getNodeByPath(base_node, zone_name) + + def get_time_assignment(self, time: float = None) -> float: + """Retrieve the default time for the CGNS operations. + + If there are available time steps, it will return the first one; otherwise, it will return 0.0.
+ + Args: + time (float, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. + + Returns: + float: The attributed time. + + Note: + - The default time step is used as a reference point for many CGNS operations. + - It is important for accessing and visualizing data at specific time points in a simulation. + """ + if self._defaults["active_time"] is None and time is None: + timestamps = self.get_all_mesh_times() + return sorted(timestamps)[0] if len(timestamps) > 0 else 0.0 + return self._defaults["active_time"] if time is None else time + + def get_all_mesh_times(self) -> list[float]: + """Return the list of time steps for which a mesh tree is stored.""" + if self._meshes is None: + return [] + return list(self._meshes.keys()) + + def get_base_names( + self, full_path: bool = False, unique: bool = False, time: float = None + ) -> list[str]: + """Return Base names. + + Args: + full_path (bool, optional): If True, returns full paths instead of only Base names. Defaults to False. + unique (bool, optional): If True, returns unique names instead of potentially duplicated names. Defaults to False. + time (float, optional): The time at which to check for the Base. If a specific time is not provided, the default time step is used. + + Returns: + list[str]: The list of Base names (full paths if `full_path` is True), empty if there is no mesh at the given time. + """ + time = self.get_time_assignment(time) + + if self._meshes is not None and self._meshes.get(time) is not None: + return CGH.get_base_names( + self._meshes[time], full_path=full_path, unique=unique + ) + return [] + + def get_zone_assignment( + self, zone_name: str = None, base_name: str = None, time: float = None + ) -> Optional[str]: + """Retrieve the default zone name for the CGNS operations. + + This function calculates the attributed zone for a specific operation based on the + default zone set in the system, within the specified base. + + Args: + zone_name (str, optional): The name of the zone to attribute the operation to. If not provided, the default zone set in the system within the specified base will be used. + base_name (str, optional): The name of the base within which the zone should be attributed. If not provided, the default base set in the system will be used. + time (float, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. + + Raises: + KeyError: If no default zone can be determined based on the provided or default values. + KeyError: If no zone node is found after following given and default parameters. + + Returns: + str: The attributed zone name, or None if the base contains no zone. + + Note: + - If neither a specific zone name nor a specific base name is provided, the function will use the default zone provided by the user. + - In case the default zone does not exist: If no specific time is provided, the function will use the default time provided by the user. + """ + zone_name = zone_name or self._defaults.get("active_zone") + + if zone_name: + return zone_name + + base_name = self.get_base_assignment(base_name, time) + zone_names = self.get_zone_names(base_name, time=time) + if len(zone_names) == 0: + return None + elif len(zone_names) == 1: + # logging.info(f"No default zone provided. Taking the only zone available: {zone_names[0]} in default base: {base_name}") + return zone_names[0] + + raise KeyError( + f"No default zone provided among {zone_names} in the default base: {base_name}" + ) + + def get_base(self, base_name: str = None, time: float = None) -> CGNSNode: + """Return Base node named `base_name`. + + If `base_name` is not specified, checks that there is **at most** one base, else raises an error.
+ + Args: + base_name (str, optional): The name of the Base node to retrieve. Defaults to None. + time (float, optional): Time at which you want to retrieve the Base node. If a specific time is not provided, the default time step is used. + + Returns: + CGNSNode or None: The Base node with the specified name or None if it is not found. + """ + time = self.get_time_assignment(time) + base_name = self.get_base_assignment(base_name, time) + + if (self._meshes is None) or (self._meshes.get(time) is None): + logger.warning(f"No base with name {base_name} in this tree") + return None + + return CGU.getNodeByPath(self._meshes[time], f"/CGNSTree/{base_name}") + + def get_base_assignment(self, base_name: str = None, time: float = None) -> Optional[str]: + """Retrieve the default base name for the CGNS operations. + + This function calculates the attributed base for a specific operation based on the + default base set in the system. + + Args: + base_name (str, optional): The name of the base to attribute the operation to. If not provided, the default base set in the system will be used. + time (float, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. + + Raises: + KeyError: If no default base can be determined based on the provided or default values. + KeyError: If no base node is found after following given and default parameters. + + Returns: + str: The attributed base name, or None if there is no base. + + Note: + - If no specific base name is provided, the function will use the default base provided by the user. + - In case the default base does not exist: If no specific time is provided, the function will use the default time provided by the user. + """ + base_name = base_name or self._defaults.get("active_base") + + if base_name: + return base_name + + base_names = self.get_base_names(time=time) + if len(base_names) == 0: + return None + elif len(base_names) == 1: + # logging.info(f"No default base provided. Taking the only base available: {base_names[0]}") + return base_names[0] + + raise KeyError(f"No default base provided among {base_names}") + + def get_zone_names( + self, + base_name: str = None, + full_path: bool = False, + unique: bool = False, + time: float = None, + ) -> list[str]: + """Return list of Zone names in Base named `base_name` with specific time. + + Args: + base_name (str, optional): Name of Base where to search Zones. If not specified, checks if there is at most one Base. Defaults to None. + full_path (bool, optional): If True, returns full paths instead of only Zone names. Defaults to False. + unique (bool, optional): If True, returns unique names instead of potentially duplicated names. Defaults to False. + time (float, optional): The time at which to check for the Zone. If a specific time is not provided, the default time step is used. + + Returns: + list[str]: List of Zone names in Base named `base_name`, empty if there is none or if the Base doesn't exist.
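+ + Example: A minimal sketch, assuming the tree has been populated with a single base "Base" holding one zone "Zone": + >>> fields = FieldCollection() + >>> # ... meshes added beforehand via init_tree / add ... + >>> fields.get_zone_names("Base") + ['Zone'] + >>> fields.get_zone_names("Base", full_path=True) + ['Base/Zone']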
+ """ + zone_paths = [] + + # get_base will look for default base_name and time + base_node = self.get_base(base_name, time) + if base_node is not None: + z_paths = CGU.getPathsByTypeSet(base_node, "CGNSZone_t") + for pth in z_paths: + s_pth = pth.split("/") + assert len(s_pth) == 2 + assert s_pth[0] == base_name or base_name is None + if full_path: + zone_paths.append(pth) + else: + zone_paths.append(s_pth[1]) + + if unique: + return list(set(zone_paths)) + else: + return zone_paths diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py index 6dc704c6..0e0b45a2 100644 --- a/src/plaid/containers/sample.py +++ b/src/plaid/containers/sample.py @@ -200,8 +200,8 @@ def __init__( self._mesh_zone_name: str = mesh_zone_name self._meshes: dict[float, CGNSTree] = {} - self._scalars: dict[str, ScalarType] = {} - self._time_series: dict[str, TimeSeriesType] = {} + self._scalars: dict[str, Scalar] = {} + self._time_series: dict[str, TimeSeries] = {} self._links: dict[float, list[LinkType]] = links self._paths: dict[float, list[PathType]] = paths diff --git a/src/plaid/containers/sample_container/__init__.py b/src/plaid/containers/sample_container/__init__.py deleted file mode 100644 index 8a11c4d5..00000000 --- a/src/plaid/containers/sample_container/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Implements the `Sample` container and features managers.""" diff --git a/src/plaid/containers/sample_container/field_manager.py b/src/plaid/containers/sample_container/field_manager.py deleted file mode 100644 index bbee7753..00000000 --- a/src/plaid/containers/sample_container/field_manager.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Module that implements the `FieldManager` class that holds the responsability of managing fields within a Sample.""" - -from plaid.types import CGNSTree, FieldType - - -class FieldManager: - """Manager object for scalars.""" - - def __init__(self): - self.features: dict[str, FieldType] = {} - - def add_field( - self, - name: str, - field: FieldType, - zone_name: str = None, - base_name: str = None, - location: str = "Vertex", - time: float = None, - ) -> None: - """Add field".""" - pass - - def remove_field( - self, - name: str, - zone_name: str = None, - base_name: str = None, - location: str = "Vertex", - time: float = None, - ) -> CGNSTree: - """Remove field.""" - pass - - def get_field( - self, - name: str, - zone_name: str = None, - base_name: str = None, - location: str = "Vertex", - time: float = None, - ) -> FieldType: - """Get field.""" - pass - - def get_field_names( - self, - zone_name: str = None, - base_name: str = None, - location: str = "Vertex", - time: float = None, - ) -> set[str]: - """Get all fields names.""" - pass diff --git a/src/plaid/containers/sample_container/scalar_manager.py b/src/plaid/containers/sample_container/scalar_manager.py deleted file mode 100644 index e0d46ae5..00000000 --- a/src/plaid/containers/sample_container/scalar_manager.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Module that implements the `ScalarManager` class that holds the responsability of managing scalars within a Sample.""" - -from plaid.types import ScalarType - - -class ScalarCollection: - """Manager object for scalars.""" - - def __init__(self): - self.features: dict[str, ScalarType] = {} - - def add(self, name: str, value: ScalarType) -> None: - """Add a scalar.""" - self.features[name] = value - - def remove(self, name: str) -> ScalarType: - """Remove a scalar.""" - return self.features.pop(name) - - def get(self, name: str) -> ScalarType: - """Get scalar.""" - 
return self.features[name] - - def get_names(self) -> set[str]: - """Get all the scalars names.""" - return sorted(self.features.keys()) From e6b303530ed18f94dfc4dd7dd97d20455097d19c Mon Sep 17 00:00:00 2001 From: Brian Staber Date: Tue, 26 Aug 2025 22:19:11 +0200 Subject: [PATCH 04/13] :recycle: Introduce SampleScalars --- .../2D_MultiScHypEl/construct_prediction.py | 2 +- .../prepare_2D_MultiScHypEl.py | 5 +- .../FNO/2D_MultiScHypEl/train_and_predict.py | 8 +- .../FNO/Rotor37/construct_prediction.py | 2 +- benchmarks/FNO/Rotor37/prepare_rotor37.py | 5 +- benchmarks/FNO/Rotor37/train_and_predict.py | 8 +- .../FNO/Tensile2d/construct_prediction.py | 2 +- benchmarks/FNO/Tensile2d/prepare_tensile2d.py | 4 +- benchmarks/FNO/Tensile2d/train_and_predict.py | 8 +- .../FNO/VKI-LS59/construct_prediction.py | 2 +- benchmarks/FNO/VKI-LS59/prepare_vki.py | 5 +- benchmarks/FNO/VKI-LS59/train_and_predict.py | 10 +- benchmarks/MGN/data.py | 4 +- benchmarks/MMGP/Rotor37/run_rotor37.py | 10 +- .../MMGP/Tensile2d/construct_prediction.py | 2 +- benchmarks/MMGP/VKI-LS59/data.py | 4 +- .../Vi-Transf/main_elasto_plasto_dynamics.py | 12 +- benchmarks/Vi-Transf/main_stationary.py | 12 +- .../bridges/airfrans_sample_to_geometric.py | 8 +- .../bridges/base_sample_to_geometric.py | 4 +- .../bridges/multiscale_sample_to_geometric.py | 4 +- .../bridges/tensile_sample_to_geometric.py | 4 +- .../loader/bridges/vki_sample_to_geometric.py | 4 +- .../convert_users_data_into_plaid.ipynb | 4 +- docs/source/notebooks/dataset.ipynb | 46 +- docs/source/notebooks/huggingface.ipynb | 4 +- docs/source/notebooks/init_with_tabular.ipynb | 4 +- docs/source/notebooks/pipeline.ipynb | 4 +- docs/source/notebooks/sample.ipynb | 14 +- docs/source/notebooks/stats.ipynb | 4 +- .../bridges/huggingface_bridge_example.py | 4 +- examples/containers/bench_parallel_load.py | 4 +- examples/containers/dataset_example.py | 46 +- examples/containers/sample_example.py | 16 +- examples/convert_users_data_example.py | 4 +- examples/pipelines/pipeline.py | 4 +- examples/utils/init_with_tabular_example.py | 4 +- examples/utils/stats_example.py | 6 +- src/plaid/containers/collections.py | 527 +----------------- src/plaid/containers/dataset.py | 6 +- src/plaid/containers/sample.py | 131 ++--- src/plaid/post/bisect.py | 4 +- src/plaid/problem_definition.py | 4 +- src/plaid/utils/init_with_tabular.py | 2 +- src/plaid/utils/stats.py | 4 +- tests/bridges/test_huggingface_bridge.py | 2 +- tests/conftest.py | 4 +- tests/containers/test_dataset.py | 2 +- tests/containers/test_sample.py | 66 +-- tests/utils/test_init_with_tabular.py | 4 +- tests/utils/test_stats.py | 4 +- 51 files changed, 245 insertions(+), 812 deletions(-) diff --git a/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py b/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py index 75065279..a1528333 100644 --- a/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py +++ b/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py @@ -65,7 +65,7 @@ for fn in out_fields_names: prediction[count][fn] = op.dot(sample_pred.get_field(fn)) for sn in out_scalars_names: - prediction[count][sn] = sample_pred.get_scalar(sn) + prediction[count][sn] = sample_pred.scalars.get(sn) count += 1 diff --git a/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py b/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py index 32f0a577..dc11efc4 100644 --- a/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py +++ b/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py @@ -6,7 +6,6 @@ from 
Muscat.Containers.MeshFieldOperations import GetFieldTransferOp from Muscat.FE.Fields.FEField import FEField from Muscat.Bridges.CGNSBridge import MeshToCGNS,CGNSToMesh -import Muscat.Containers.ElementsDescription as ED from Muscat.Containers.ConstantRectilinearMeshTools import CreateConstantRectilinearMesh from Muscat.Containers.MeshTetrahedrization import Tetrahedrization from Muscat.Containers.MeshModificationTools import ComputeSkin @@ -109,8 +108,8 @@ def compute_signed_distance(mesh,eval_points): for scalar_name in scalar_names: - old_scalar= sample.get_scalar( name=scalar_name) - new_sample.add_scalar(scalar_name, old_scalar) + old_scalar= sample.scalars.get( name=scalar_name) + new_sample.scalars.add(scalar_name, old_scalar) new_sample.add_field("Signed_Distance",compute_signed_distance(copy.deepcopy(input_mesh),rec_mesh.nodes)) path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index)) diff --git a/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py b/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py index 8990aaa0..d0bae25f 100644 --- a/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py +++ b/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py @@ -46,14 +46,14 @@ for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size)) for i, id_sample in enumerate(ids_train): for k, fn in enumerate(out_fields_names): outputs[i, k, :, :] = dataset[id_sample].get_field(fn).reshape((size, size)) for k, sn in enumerate(out_scalars_names): - outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn) + outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn) min_in = inputs.min(axis=(0, 2, 3), keepdims=True) @@ -125,7 +125,7 @@ def __getitem__(self, idx): for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) inputs = (inputs - min_in) / (max_in - min_in) @@ -154,7 +154,7 @@ def __getitem__(self, idx): for k, fn in enumerate(out_fields_names): dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten()) for k, sn in enumerate(out_scalars_names): - dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) + dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir): diff --git a/benchmarks/FNO/Rotor37/construct_prediction.py b/benchmarks/FNO/Rotor37/construct_prediction.py index dbe90b7e..a7b3daa7 100644 --- a/benchmarks/FNO/Rotor37/construct_prediction.py +++ b/benchmarks/FNO/Rotor37/construct_prediction.py @@ -65,7 +65,7 @@ for fn in out_fields_names: prediction[count][fn] = op.dot(sample_pred.get_field(fn)) for sn in out_scalars_names: - prediction[count][sn] = sample_pred.get_scalar(sn) + prediction[count][sn] = sample_pred.scalars.get(sn) count += 1 diff --git a/benchmarks/FNO/Rotor37/prepare_rotor37.py b/benchmarks/FNO/Rotor37/prepare_rotor37.py index d6815c8a..bc7c0d7d 100644 
--- a/benchmarks/FNO/Rotor37/prepare_rotor37.py +++ b/benchmarks/FNO/Rotor37/prepare_rotor37.py @@ -7,7 +7,6 @@ from Muscat.FE.Fields.FEField import FEField from Muscat.Bridges.CGNSBridge import MeshToCGNS,CGNSToMesh from Muscat.Containers.ConstantRectilinearMeshTools import CreateConstantRectilinearMesh -from Muscat.Containers.MeshTetrahedrization import Tetrahedrization from Muscat.Containers.MeshModificationTools import ComputeSkin from Muscat.FE.FETools import PrepareFEComputation from Muscat.FE.FETools import ComputeNormalsAtPoints @@ -113,8 +112,8 @@ def compute_signed_distance(mesh,eval_points): for scalar_name in scalar_names: - old_scalar= sample.get_scalar( name=scalar_name) - new_sample.add_scalar(scalar_name, old_scalar) + old_scalar= sample.scalars.get( name=scalar_name) + new_sample.scalars.add(scalar_name, old_scalar) path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index)) diff --git a/benchmarks/FNO/Rotor37/train_and_predict.py b/benchmarks/FNO/Rotor37/train_and_predict.py index 92f88d6f..d383ed15 100644 --- a/benchmarks/FNO/Rotor37/train_and_predict.py +++ b/benchmarks/FNO/Rotor37/train_and_predict.py @@ -47,14 +47,14 @@ for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :, :] = dataset[id_sample].scalars.get(sn) outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size, size)) for i, id_sample in enumerate(ids_train): for k, fn in enumerate(out_fields_names): outputs[i, k, :, :, :] = dataset[id_sample].get_field(fn).reshape((size, size, size)) for k, sn in enumerate(out_scalars_names): - outputs[i, k+len(out_fields_names), :, :, :] = dataset[id_sample].get_scalar(sn) + outputs[i, k+len(out_fields_names), :, :, :] = dataset[id_sample].scalars.get(sn) min_in = inputs.min(axis=(0, 2, 3, 4), keepdims=True) @@ -126,7 +126,7 @@ def __getitem__(self, idx): for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :, :] = dataset[id_sample].scalars.get(sn) inputs = (inputs - min_in) / (max_in - min_in+ 1e-8) @@ -155,7 +155,7 @@ def __getitem__(self, idx): for k, fn in enumerate(out_fields_names): dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :, :].flatten()) for k, sn in enumerate(out_scalars_names): - dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :, :].flatten())) + dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :, :].flatten())) if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir): diff --git a/benchmarks/FNO/Tensile2d/construct_prediction.py b/benchmarks/FNO/Tensile2d/construct_prediction.py index d0c7c855..27760e12 100644 --- a/benchmarks/FNO/Tensile2d/construct_prediction.py +++ b/benchmarks/FNO/Tensile2d/construct_prediction.py @@ -66,7 +66,7 @@ for fn in out_fields_names: prediction[count][fn] = op.dot(sample_pred.get_field(fn)) for sn in out_scalars_names: - prediction[count][sn] = sample_pred.get_scalar(sn) + prediction[count][sn] = sample_pred.scalars.get(sn) count += 1 diff --git a/benchmarks/FNO/Tensile2d/prepare_tensile2d.py 
b/benchmarks/FNO/Tensile2d/prepare_tensile2d.py index 8691482f..55a1bed9 100644 --- a/benchmarks/FNO/Tensile2d/prepare_tensile2d.py +++ b/benchmarks/FNO/Tensile2d/prepare_tensile2d.py @@ -103,8 +103,8 @@ def compute_signed_distance(mesh,eval_points): for scalar_name in scalar_names: - old_scalar= sample.get_scalar( name=scalar_name) - new_sample.add_scalar(scalar_name, old_scalar) + old_scalar= sample.scalars.get( name=scalar_name) + new_sample.scalars.add(scalar_name, old_scalar) new_sample.add_field("Signed_Distance",compute_signed_distance(copy.deepcopy(input_mesh),rec_mesh.nodes)) path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index)) diff --git a/benchmarks/FNO/Tensile2d/train_and_predict.py b/benchmarks/FNO/Tensile2d/train_and_predict.py index faeac67b..f4903fd0 100644 --- a/benchmarks/FNO/Tensile2d/train_and_predict.py +++ b/benchmarks/FNO/Tensile2d/train_and_predict.py @@ -47,14 +47,14 @@ for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size)) for i, id_sample in enumerate(ids_train): for k, fn in enumerate(out_fields_names): outputs[i, k, :, :] = dataset[id_sample].get_field(fn).reshape((size, size)) for k, sn in enumerate(out_scalars_names): - outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn) + outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn) min_in = inputs.min(axis=(0, 2, 3), keepdims=True) @@ -124,7 +124,7 @@ def __getitem__(self, idx): for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) inputs = (inputs - min_in) / (max_in - min_in) @@ -152,7 +152,7 @@ def __getitem__(self, idx): for k, fn in enumerate(out_fields_names): dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten()) for k, sn in enumerate(out_scalars_names): - dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) + dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir): diff --git a/benchmarks/FNO/VKI-LS59/construct_prediction.py b/benchmarks/FNO/VKI-LS59/construct_prediction.py index cb3c62bf..d5fb7525 100644 --- a/benchmarks/FNO/VKI-LS59/construct_prediction.py +++ b/benchmarks/FNO/VKI-LS59/construct_prediction.py @@ -62,7 +62,7 @@ for fn in out_fields_names: prediction[count][fn] = sample_pred.get_field(fn, base_name="Base_2_2") for sn in out_scalars_names: - prediction[count][sn] = sample_pred.get_scalar(sn) + prediction[count][sn] = sample_pred.scalars.get(sn) count += 1 diff --git a/benchmarks/FNO/VKI-LS59/prepare_vki.py b/benchmarks/FNO/VKI-LS59/prepare_vki.py index defae36a..3154e82c 100644 --- a/benchmarks/FNO/VKI-LS59/prepare_vki.py +++ b/benchmarks/FNO/VKI-LS59/prepare_vki.py @@ -1,11 +1,8 @@ from plaid.problem_definition import ProblemDefinition from plaid.containers.sample import Sample -import numpy as np from Muscat.Bridges.CGNSBridge import 
MeshToCGNS -import Muscat.Containers.ElementsDescription as ED from Muscat.Containers.ConstantRectilinearMeshTools import CreateConstantRectilinearMesh from Muscat.Containers.MeshTetrahedrization import Tetrahedrization -from Muscat.Containers import MeshCreationTools as MCT import os, time, shutil from tqdm import tqdm @@ -64,7 +61,7 @@ raise("unkown sample_index") for sn in scalar_names: - new_sample.add_scalar(sn, sample.get_scalar(sn)) + new_sample.scalars.add(sn, sample.scalars.get(sn)) new_sample.add_field("Signed_Distance", sample.get_field("sdf", base_name="Base_2_2")) diff --git a/benchmarks/FNO/VKI-LS59/train_and_predict.py b/benchmarks/FNO/VKI-LS59/train_and_predict.py index 445b697d..b4417265 100644 --- a/benchmarks/FNO/VKI-LS59/train_and_predict.py +++ b/benchmarks/FNO/VKI-LS59/train_and_predict.py @@ -49,14 +49,14 @@ for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance", base_name="Base_2_2").reshape((size1, size2)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size1, size2)) for i, id_sample in enumerate(ids_train): for k, fn in enumerate(out_fields_names): outputs[i, k, :, :] = dataset[id_sample].get_field(fn, base_name="Base_2_2").reshape((size1, size2)) for k, sn in enumerate(out_scalars_names): - outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn) + outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn) min_in = inputs.min(axis=(0, 2, 3), keepdims=True) @@ -70,7 +70,7 @@ import torch -from torch.utils.data import Dataset, TensorDataset +from torch.utils.data import Dataset class GridDataset(Dataset): def __init__(self, inputs, outputs): @@ -128,7 +128,7 @@ def __getitem__(self, idx): for in_chan in range(len(in_scalars_names)+1): inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance", base_name="Base_2_2").reshape((size1, size2)) for k, sn in enumerate(in_scalars_names): - inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn) + inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn) inputs = (inputs - min_in) / (max_in - min_in) @@ -145,7 +145,7 @@ def __getitem__(self, idx): for k, fn in enumerate(out_fields_names): dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten()) for k, sn in enumerate(out_scalars_names): - dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) + dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten())) if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir): diff --git a/benchmarks/MGN/data.py b/benchmarks/MGN/data.py index 6837cb14..a82f91bb 100644 --- a/benchmarks/MGN/data.py +++ b/benchmarks/MGN/data.py @@ -231,7 +231,7 @@ def process_samples(dataset_name, dataset_path, indices, field_names, process_ty # Retrieve input scalars in_scalars_names = scalar_input_dict.get(dataset_name, []) X_scalars.append( - [mesh_data.get_scalar(fn) for fn in in_scalars_names] + [mesh_data.scalars.get(fn) for fn in in_scalars_names] if in_scalars_names else [] ) @@ -239,7 +239,7 @@ def process_samples(dataset_name, dataset_path, indices, field_names, process_ty # Retrieve output scalars out_scalars_names = scalar_output_dict.get(dataset_name, []) Y_scalars.append( - [mesh_data.get_scalar(fn) for fn in 
out_scalars_names] + [mesh_data.scalars.get(fn) for fn in out_scalars_names] if out_scalars_names else [] ) diff --git a/benchmarks/MMGP/Rotor37/run_rotor37.py b/benchmarks/MMGP/Rotor37/run_rotor37.py index 6982abea..79bf91ea 100644 --- a/benchmarks/MMGP/Rotor37/run_rotor37.py +++ b/benchmarks/MMGP/Rotor37/run_rotor37.py @@ -58,16 +58,16 @@ def convert_data( sample = Sample.model_validate(pickle.loads(dataset[id]["sample"])) nodes.append(sample.get_nodes()) - omega = sample.get_scalar("Omega") - pressure = sample.get_scalar("P") + omega = sample.scalars.get("Omega") + pressure = sample.scalars.get("P") density = sample.get_field("Density") pressure_field = sample.get_field("Pressure") temperature = sample.get_field("Temperature") - massflow = sample.get_scalar("Massflow") - compression_ratio = sample.get_scalar("Compression_ratio") - efficiency = sample.get_scalar("Efficiency") + massflow = sample.scalars.get("Massflow") + compression_ratio = sample.scalars.get("Compression_ratio") + efficiency = sample.scalars.get("Efficiency") X_scalars.append(np.array([omega, pressure])) Y_scalars.append(np.array([massflow, compression_ratio, efficiency])) diff --git a/benchmarks/MMGP/Tensile2d/construct_prediction.py b/benchmarks/MMGP/Tensile2d/construct_prediction.py index a218bdce..d48f37f2 100644 --- a/benchmarks/MMGP/Tensile2d/construct_prediction.py +++ b/benchmarks/MMGP/Tensile2d/construct_prediction.py @@ -43,7 +43,7 @@ for fn in out_fields_names: prediction[count][fn] = sample_pred.get_field(fn+"_predicted") for sn in out_scalars_names: - prediction[count][sn] = sample_pred.get_scalar(sn+"_predicted") + prediction[count][sn] = sample_pred.scalars.get(sn+"_predicted") count += 1 diff --git a/benchmarks/MMGP/VKI-LS59/data.py b/benchmarks/MMGP/VKI-LS59/data.py index 4d2eb25b..83fe147a 100644 --- a/benchmarks/MMGP/VKI-LS59/data.py +++ b/benchmarks/MMGP/VKI-LS59/data.py @@ -56,7 +56,7 @@ def extract_split_data( # Input scalar values for key in input_scalars: - inputs[key] = [plaid_dataset[i].get_scalar(key) for i in ids] + inputs[key] = [plaid_dataset[i].scalars.get(key) for i in ids] # --- OUTPUTS --- # Selected mesh field data @@ -67,7 +67,7 @@ def extract_split_data( # Selected output scalar values for key in SCALAR_OUTPUTS: - outputs[key] = [plaid_dataset[i].get_scalar(key) for i in ids] + outputs[key] = [plaid_dataset[i].scalars.get(key) for i in ids] return inputs, outputs diff --git a/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py b/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py index 912cea7b..f009840e 100644 --- a/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py +++ b/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py @@ -213,14 +213,14 @@ def main(cfg): optimizer.step() for n, fn in enumerate(output_fields_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"train/loss/{fn}", epoch_train_field_losses[n].item(), epoch ) for n, sn in enumerate(output_scalars_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"train/loss/{sn}", epoch_train_scalar_losses[n].item(), epoch ) - tb_logger.add_scalar("train/loss", epoch_train_loss, epoch) + tb_logger.scalars.add("train/loss", epoch_train_loss, epoch) # validation loop epoch_val_loss = 0 @@ -245,14 +245,14 @@ def main(cfg): epoch_val_loss += loss.item() * (local_batch_size / len(val_dataset)) for n, fn in enumerate(output_fields_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"val/loss/{fn}", epoch_val_field_losses[n].item(), epoch ) for n, sn in enumerate(output_scalars_names): - tb_logger.add_scalar( + 
tb_logger.scalars.add( f"val/loss/{sn}", epoch_val_scalar_losses[n].item(), epoch ) - tb_logger.add_scalar("val/loss", epoch_val_loss, epoch) + tb_logger.scalars.add("val/loss", epoch_val_loss, epoch) logger.info( f"Epoch {epoch:>{len(str(epochs))}}: Train Loss: {epoch_train_loss:.5f} | Val Loss: {epoch_val_loss:.5f}" ) diff --git a/benchmarks/Vi-Transf/main_stationary.py b/benchmarks/Vi-Transf/main_stationary.py index 0c39bc6c..27ea1dec 100644 --- a/benchmarks/Vi-Transf/main_stationary.py +++ b/benchmarks/Vi-Transf/main_stationary.py @@ -176,14 +176,14 @@ def main(cfg): optimizer.step() for n, fn in enumerate(output_fields_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"train/loss/{fn}", epoch_train_field_losses[n].item(), epoch ) for n, sn in enumerate(output_scalars_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"train/loss/{sn}", epoch_train_scalar_losses[n].item(), epoch ) - tb_logger.add_scalar("train/loss", epoch_train_loss, epoch) + tb_logger.scalars.add("train/loss", epoch_train_loss, epoch) # validation loop epoch_val_loss = 0 @@ -208,14 +208,14 @@ def main(cfg): epoch_val_loss += loss.item() * (local_batch_size / len(val_dataset)) for n, fn in enumerate(output_fields_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"val/loss/{fn}", epoch_val_field_losses[n].item(), epoch ) for n, sn in enumerate(output_scalars_names): - tb_logger.add_scalar( + tb_logger.scalars.add( f"val/loss/{sn}", epoch_val_scalar_losses[n].item(), epoch ) - tb_logger.add_scalar("val/loss", epoch_val_loss, epoch) + tb_logger.scalars.add("val/loss", epoch_val_loss, epoch) logger.info( f"Epoch {epoch:>{len(str(epochs))}}: Train Loss: {epoch_train_loss:.5f} | Val Loss: {epoch_val_loss:.5f}" ) diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py index 71e4e2d0..917177c6 100644 --- a/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py +++ b/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py @@ -30,11 +30,11 @@ def airfrans_sample_to_geometric( edge_weight = np.linalg.norm(v2 - v1, axis=1) # loading scalars - aoa = sample.get_scalar("angle_of_attack") - inlet_velocity = sample.get_scalar("inlet_velocity") + aoa = sample.scalars.get("angle_of_attack") + inlet_velocity = sample.scalars.get("inlet_velocity") u_inlet = [np.cos(aoa) * inlet_velocity, np.sin(aoa) * inlet_velocity] - cl = sample.get_scalar("C_L") - cd = sample.get_scalar("C_D") + cl = sample.scalars.get("C_L") + cd = sample.scalars.get("C_D") output_scalars = np.array([cl, cd]) # loading fields diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py index 8521b0cd..0e325f81 100644 --- a/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py +++ b/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py @@ -46,9 +46,9 @@ def base_sample_to_geometric( input_scalars = [] output_scalars = [] for name in input_scalars_names: - input_scalars.append(sample.get_scalar(name)) + input_scalars.append(sample.scalars.get(name)) for name in output_scalars_names: - output_scalars.append(sample.get_scalar(name)) + output_scalars.append(sample.scalars.get(name)) # loading fields input_fields_names = problem_definition.get_input_fields_names() diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py 
b/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py index d9d6f561..d8059a24 100644 --- a/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py +++ b/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py @@ -52,9 +52,9 @@ def multiscale_sample_to_geometric( input_scalars = [] output_scalars = [] for name in input_scalars_names: - input_scalars.append(sample.get_scalar(name)) + input_scalars.append(sample.scalars.get(name)) for name in output_scalars_names: - output_scalars.append(sample.get_scalar(name)) + output_scalars.append(sample.scalars.get(name)) input_fields = vertices input_fields_names = ["x", "y"] diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py index c16850f9..b5956650 100644 --- a/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py +++ b/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py @@ -61,9 +61,9 @@ def tensile_sample_to_geometric( input_scalars = [] output_scalars = [] for name in input_scalars_names: - input_scalars.append(sample.get_scalar(name)) + input_scalars.append(sample.scalars.get(name)) for name in output_scalars_names: - output_scalars.append(sample.get_scalar(name)) + output_scalars.append(sample.scalars.get(name)) # sdf and one hot encoding border_ids = get_border_ids(vertices, faces) diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py index d8e18b2f..649e6957 100644 --- a/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py +++ b/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py @@ -37,9 +37,9 @@ def vki_sample_to_geometric( input_scalars = [] output_scalars = [] for name in input_scalars_names: - input_scalars.append(sample.get_scalar(name)) + input_scalars.append(sample.scalars.get(name)) for name in output_scalars_names: - output_scalars.append(sample.get_scalar(name)) + output_scalars.append(sample.scalars.get(name)) if len(input_fields_names) >= 1: input_fields = [] diff --git a/docs/source/notebooks/convert_users_data_into_plaid.ipynb b/docs/source/notebooks/convert_users_data_into_plaid.ipynb index 038871dc..a2e8bac7 100644 --- a/docs/source/notebooks/convert_users_data_into_plaid.ipynb +++ b/docs/source/notebooks/convert_users_data_into_plaid.ipynb @@ -196,10 +196,10 @@ "\n", " # Add random scalar values to the sample\n", " for sname in in_scalars_names:\n", - " sample.add_scalar(sname, np.random.randn())\n", + " sample.scalars.add(sname, np.random.randn())\n", "\n", " for sname in out_scalars_names:\n", - " sample.add_scalar(sname, np.random.randn())\n", + " sample.scalars.add(sname, np.random.randn())\n", "\n", " # Add random field values to the sample\n", " for j, sname in enumerate(out_fields_names):\n", diff --git a/docs/source/notebooks/dataset.ipynb b/docs/source/notebooks/dataset.ipynb index 463efb4a..91e11056 100644 --- a/docs/source/notebooks/dataset.ipynb +++ b/docs/source/notebooks/dataset.ipynb @@ -151,7 +151,7 @@ "outputs": [], "source": [ "# Add a scalar to the Sample\n", - "sample_01.add_scalar(\"rotation\", np.random.randn())\n", + "sample_01.scalars.add(\"rotation\", np.random.randn())\n", "print(f\"{sample_01 = }\")" ] }, @@ -181,7 +181,7 @@ "outputs": [], "source": [ "# Add a scalar to the second Sample\n", - 
"sample_02.add_scalar(\"rotation\", np.random.randn())\n", + "sample_02.scalars.add(\"rotation\", np.random.randn())\n", "print(f\"{sample_02 = }\")" ] }, @@ -201,8 +201,8 @@ "# Initialize a third empty Sample\n", "print(\"#---# Empty Sample\")\n", "sample_03 = Sample()\n", - "sample_03.add_scalar(\"speed\", np.random.randn())\n", - "sample_03.add_scalar(\"rotation\", sample_01.get_scalar(\"rotation\"))\n", + "sample_03.scalars.add(\"speed\", np.random.randn())\n", + "sample_03.scalars.add(\"rotation\", sample_01.scalars.get(\"rotation\"))\n", "sample_03.add_tree(cgns_mesh)\n", "\n", "# Show Sample CGNS content\n", @@ -237,9 +237,9 @@ "print(f\"{sample_03 = }\", end=\"\\n\\n\")\n", "\n", "# Print sample scalar data\n", - "print(f\"{sample_03.get_scalar_names() = }\")\n", - "print(f\"{sample_03.get_scalar('speed') = }\")\n", - "print(f\"{sample_03.get_scalar('rotation') = }\", end=\"\\n\\n\")\n", + "print(f\"{sample_03.scalars.get_names() = }\")\n", + "print(f\"{sample_03.scalars.get('speed') = }\")\n", + "print(f\"{sample_03.scalars.get('rotation') = }\", end=\"\\n\\n\")\n", "\n", "# Print sample scalar data\n", "print(f\"{sample_03.get_field_names() = }\")\n", @@ -422,9 +422,9 @@ "print(f\"{dataset[1] = }\") # getitem strategy\n", "print(f\"{dataset[2] = }\", end=\"\\n\\n\")\n", "\n", - "print(\"scalar of the first sample = \", dataset[0].get_scalar_names())\n", - "print(\"scalar of the second sample = \", dataset[1].get_scalar_names())\n", - "print(\"scalar of the third sample = \", dataset[2].get_scalar_names())" + "print(\"scalar of the first sample = \", dataset[0].scalars.get_names())\n", + "print(\"scalar of the second sample = \", dataset[1].scalars.get_names())\n", + "print(\"scalar of the third sample = \", dataset[2].scalars.get_names())" ] }, { @@ -434,9 +434,9 @@ "outputs": [], "source": [ "# Access dataset information\n", - "print(f\"{dataset[0].get_scalar('rotation') = }\")\n", - "print(f\"{dataset[1].get_scalar('rotation') = }\")\n", - "print(f\"{dataset[2].get_scalar('rotation') = }\")" + "print(f\"{dataset[0].scalars.get('rotation') = }\")\n", + "print(f\"{dataset[1].scalars.get('rotation') = }\")\n", + "print(f\"{dataset[2].scalars.get('rotation') = }\")" ] }, { @@ -453,14 +453,14 @@ "outputs": [], "source": [ "# Print scalars in tabular format\n", - "print(f\"{dataset.get_scalar_names() = }\", end=\"\\n\\n\")\n", + "print(f\"{dataset.scalars.get_names() = }\", end=\"\\n\\n\")\n", "\n", - "dprint(\"get rotation scalar = \", dataset.get_scalars_to_tabular([\"rotation\"]))\n", - "dprint(\"get speed scalar = \", dataset.get_scalars_to_tabular([\"speed\"]), end=\"\\n\\n\")\n", + "dprint(\"get rotation scalar = \", dataset.scalars.gets_to_tabular([\"rotation\"]))\n", + "dprint(\"get speed scalar = \", dataset.scalars.gets_to_tabular([\"speed\"]), end=\"\\n\\n\")\n", "\n", "# Get specific scalars in tabular format\n", - "dprint(\"get specific scalars =\", dataset.get_scalars_to_tabular([\"speed\", \"rotation\"]))\n", - "dprint(\"get all scalars =\", dataset.get_scalars_to_tabular())" + "dprint(\"get specific scalars =\", dataset.scalars.gets_to_tabular([\"speed\", \"rotation\"]))\n", + "dprint(\"get all scalars =\", dataset.scalars.gets_to_tabular())" ] }, { @@ -470,7 +470,7 @@ "outputs": [], "source": [ "# Get specific scalars np.array\n", - "print(\"get all scalar arrays = \", dataset.get_scalars_to_tabular(as_nparray=True))" + "print(\"get all scalar arrays = \", dataset.scalars.gets_to_tabular(as_nparray=True))" ] }, { @@ -518,8 +518,8 @@ "samples = []\n", "for _ in 
range(nb_samples):\n", " sample = Sample()\n", - " sample.add_scalar(\"rotation\", np.random.rand() + 1.0)\n", - " sample.add_scalar(\"random_name\", np.random.rand() - 1.0)\n", + " sample.scalars.add(\"rotation\", np.random.rand() + 1.0)\n", + " sample.scalars.add(\"random_name\", np.random.rand() - 1.0)\n", " samples.append(sample)\n", "\n", "# Add a list of Samples\n", @@ -545,7 +545,7 @@ "dataset.merge_dataset(other_dataset)\n", "print(f\"after merge: {dataset = }\", end=\"\\n\\n\")\n", "\n", - "dprint(\"dataset scalars = \", dataset.get_scalars_to_tabular())" + "dprint(\"dataset scalars = \", dataset.scalars.gets_to_tabular())" ] }, { @@ -566,7 +566,7 @@ "dataset.add_tabular_scalars(new_scalars, names=[\"Tu\", \"random_name\"])\n", "\n", "print(f\"{dataset = }\")\n", - "dprint(\"dataset scalars =\", dataset.get_scalars_to_tabular())" + "dprint(\"dataset scalars =\", dataset.scalars.gets_to_tabular())" ] }, { diff --git a/docs/source/notebooks/huggingface.ipynb b/docs/source/notebooks/huggingface.ipynb index 1965a535..ec1b21ed 100644 --- a/docs/source/notebooks/huggingface.ipynb +++ b/docs/source/notebooks/huggingface.ipynb @@ -47,7 +47,7 @@ "def show_sample(sample: Sample):\n", " print(f\"sample = {sample}\")\n", " sample.show_tree()\n", - " print(f\"{sample.get_scalar_names() = }\")\n", + " print(f\"{sample.scalars.get_names() = }\")\n", " print(f\"{sample.get_field_names() = }\")" ] }, @@ -93,7 +93,7 @@ " sample = Sample()\n", "\n", " sample.add_tree(MeshToCGNS(mesh))\n", - " sample.add_scalar(\"scalar\", np.random.randn())\n", + " sample.scalars.add(\"scalar\", np.random.randn())\n", " sample.add_field(\"node_field\", np.random.rand(1, len(points)), location=\"Vertex\")\n", " sample.add_field(\n", " \"cell_field\", np.random.rand(1, len(points)), location=\"CellCenter\"\n", diff --git a/docs/source/notebooks/init_with_tabular.ipynb b/docs/source/notebooks/init_with_tabular.ipynb index 6ee0f742..92e0134d 100644 --- a/docs/source/notebooks/init_with_tabular.ipynb +++ b/docs/source/notebooks/init_with_tabular.ipynb @@ -114,7 +114,7 @@ "outputs": [], "source": [ "# Access and display the value of a particular scalar within a sample\n", - "scalar_value = sample_1.get_scalar(\"scalar_0\")\n", + "scalar_value = sample_1.scalars.get(\"scalar_0\")\n", "print(\"Scalar 'scalar_0' in Sample 1:\", scalar_value)" ] }, @@ -126,7 +126,7 @@ "source": [ "# Retrieve tabular data from the dataset based on scalar names\n", "scalar_names = [\"scalar_1\", \"scalar_3\", \"scalar_5\"]\n", - "tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names)\n", + "tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names)\n", "print(\"Tabular Data Subset for Scalars 1, 3, and 5:\")\n", "dprint(\"tabular_data_subset\", tabular_data_subset)" ] diff --git a/docs/source/notebooks/pipeline.ipynb b/docs/source/notebooks/pipeline.ipynb index 9670e6ab..29b6bd8e 100644 --- a/docs/source/notebooks/pipeline.ipynb +++ b/docs/source/notebooks/pipeline.ipynb @@ -164,7 +164,7 @@ "source": [ "dataset_train = dataset_train.from_features_identifier(all_feature_id)\n", "print(\"dataset_train:\", dataset_train)\n", - "print(\"scalar names =\", dataset_train.get_scalar_names())\n", + "print(\"scalar names =\", dataset_train.scalars.get_names())\n", "print(\"field names =\", dataset_train.get_field_names())" ] }, @@ -216,7 +216,7 @@ "source": [ "preprocessed_dataset = preprocessor.fit_transform(dataset_train)\n", "print(\"preprocessed_dataset:\", preprocessed_dataset)\n", - "print(\"scalar names =\", 
preprocessed_dataset.get_scalar_names())\n", + "print(\"scalar names =\", preprocessed_dataset.scalars.get_names())\n", "print(\"field names =\", preprocessed_dataset.get_field_names())" ] }, diff --git a/docs/source/notebooks/sample.ipynb b/docs/source/notebooks/sample.ipynb index 2d0f421f..1090693b 100644 --- a/docs/source/notebooks/sample.ipynb +++ b/docs/source/notebooks/sample.ipynb @@ -54,7 +54,7 @@ "def show_sample(sample: Sample):\n", " print(f\"sample = {sample}\")\n", " sample.show_tree()\n", - " print(f\"{sample.get_scalar_names() = }\")\n", + " print(f\"{sample.scalars.get_names() = }\")\n", " print(f\"{sample.get_field_names() = }\")" ] }, @@ -156,7 +156,7 @@ "outputs": [], "source": [ "# Add a rotation scalar to this Sample\n", - "sample.add_scalar(\"rotation\", np.random.randn())\n", + "sample.scalars.add(\"rotation\", np.random.randn())\n", "\n", "show_sample(sample)" ] @@ -168,8 +168,8 @@ "outputs": [], "source": [ "# Add a more scalars to this Sample\n", - "sample.add_scalar(\"speed\", np.random.randn())\n", - "sample.add_scalar(\"other\", np.random.randn())\n", + "sample.scalars.add(\"speed\", np.random.randn())\n", + "sample.scalars.add(\"other\", np.random.randn())\n", "\n", "show_sample(sample)" ] @@ -402,9 +402,9 @@ "outputs": [], "source": [ "# It will look for a default base if no base and zone are given\n", - "print(f\"{sample.get_scalar_names() = }\")\n", - "print(f\"{sample.get_scalar('omega') = }\")\n", - "print(f\"{sample.get_scalar('rotation') = }\")" + "print(f\"{sample.scalars.get_names() = }\")\n", + "print(f\"{sample.scalars.get('omega') = }\")\n", + "print(f\"{sample.scalars.get('rotation') = }\")" ] }, { diff --git a/docs/source/notebooks/stats.ipynb b/docs/source/notebooks/stats.ipynb index a805602b..50e992ea 100644 --- a/docs/source/notebooks/stats.ipynb +++ b/docs/source/notebooks/stats.ipynb @@ -198,7 +198,7 @@ "spatial_shape_max = 20\n", "#\n", "for sample in samples:\n", - " sample.add_scalar(\"test_scalar\", np.random.randn())\n", + " sample.scalars.add(\"test_scalar\", np.random.randn())\n", " sample.init_base(2, 3, \"test_base\")\n", " zone_shape = np.array([0, 0, 0])\n", " sample.init_zone(zone_shape, zone_name=\"test_zone\")\n", @@ -242,7 +242,7 @@ "samples = [Sample() for _ in range(nb_samples)]\n", "\n", "for sample in samples:\n", - " sample.add_scalar(\"test_scalar\", np.random.randn())\n", + " sample.scalars.add(\"test_scalar\", np.random.randn())\n", " sample.init_base(2, 3, \"test_base\")\n", " zone_shape = np.array([0, 0, 0])\n", " sample.init_zone(zone_shape, zone_name=\"test_zone\")\n", diff --git a/examples/bridges/huggingface_bridge_example.py b/examples/bridges/huggingface_bridge_example.py index 05228513..3c9f4380 100644 --- a/examples/bridges/huggingface_bridge_example.py +++ b/examples/bridges/huggingface_bridge_example.py @@ -91,10 +91,10 @@ # Add random scalar values to the sample for sname in in_scalars_names: - sample.add_scalar(sname, np.random.randn()) + sample.scalars.add(sname, np.random.randn()) for sname in out_scalars_names: - sample.add_scalar(sname, np.random.randn()) + sample.scalars.add(sname, np.random.randn()) # Add random field values to the sample for j, sname in enumerate(out_fields_names): diff --git a/examples/containers/bench_parallel_load.py b/examples/containers/bench_parallel_load.py index d640d774..77c4f9c2 100644 --- a/examples/containers/bench_parallel_load.py +++ b/examples/containers/bench_parallel_load.py @@ -88,8 +88,8 @@ smp = tmpsmp # ---# Add some random data - smp.add_scalar("id", i) - 
smp.add_scalar("s0", np.random.randn()) + smp.scalars.add("id", i) + smp.scalars.add("s0", np.random.randn()) smp.add_field("f0", np.random.randn(100)) dset.add_sample(smp) diff --git a/examples/containers/dataset_example.py b/examples/containers/dataset_example.py index 0e56b7c9..625e9619 100644 --- a/examples/containers/dataset_example.py +++ b/examples/containers/dataset_example.py @@ -93,7 +93,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): # %% # Add a scalar to the Sample -sample_01.add_scalar("rotation", np.random.randn()) +sample_01.scalars.add("rotation", np.random.randn()) print(f"{sample_01 = }") # %% [markdown] @@ -107,7 +107,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): # %% # Add a scalar to the second Sample -sample_02.add_scalar("rotation", np.random.randn()) +sample_02.scalars.add("rotation", np.random.randn()) print(f"{sample_02 = }") # %% [markdown] @@ -117,8 +117,8 @@ def dprint(name: str, dictio: dict, end: str = "\n"): # Initialize a third empty Sample print("#---# Empty Sample") sample_03 = Sample() -sample_03.add_scalar("speed", np.random.randn()) -sample_03.add_scalar("rotation", sample_01.get_scalar("rotation")) +sample_03.scalars.add("speed", np.random.randn()) +sample_03.scalars.add("rotation", sample_01.scalars.get("rotation")) sample_03.add_tree(cgns_mesh) # Show Sample CGNS content @@ -137,9 +137,9 @@ def dprint(name: str, dictio: dict, end: str = "\n"): print(f"{sample_03 = }", end="\n\n") # Print sample scalar data -print(f"{sample_03.get_scalar_names() = }") -print(f"{sample_03.get_scalar('speed') = }") -print(f"{sample_03.get_scalar('rotation') = }", end="\n\n") +print(f"{sample_03.scalars.get_names() = }") +print(f"{sample_03.scalars.get('speed') = }") +print(f"{sample_03.scalars.get('rotation') = }", end="\n\n") # Print sample scalar data print(f"{sample_03.get_field_names() = }") @@ -238,33 +238,33 @@ def dprint(name: str, dictio: dict, end: str = "\n"): print(f"{dataset[1] = }") # getitem strategy print(f"{dataset[2] = }", end="\n\n") -print("scalar of the first sample = ", dataset[0].get_scalar_names()) -print("scalar of the second sample = ", dataset[1].get_scalar_names()) -print("scalar of the third sample = ", dataset[2].get_scalar_names()) +print("scalar of the first sample = ", dataset[0].scalars.get_names()) +print("scalar of the second sample = ", dataset[1].scalars.get_names()) +print("scalar of the third sample = ", dataset[2].scalars.get_names()) # %% # Access dataset information -print(f"{dataset[0].get_scalar('rotation') = }") -print(f"{dataset[1].get_scalar('rotation') = }") -print(f"{dataset[2].get_scalar('rotation') = }") +print(f"{dataset[0].scalars.get('rotation') = }") +print(f"{dataset[1].scalars.get('rotation') = }") +print(f"{dataset[2].scalars.get('rotation') = }") # %% [markdown] # ### Get Dataset scalars to tabular # %% # Print scalars in tabular format -print(f"{dataset.get_scalar_names() = }", end="\n\n") +print(f"{dataset.scalars.get_names() = }", end="\n\n") -dprint("get rotation scalar = ", dataset.get_scalars_to_tabular(["rotation"])) -dprint("get speed scalar = ", dataset.get_scalars_to_tabular(["speed"]), end="\n\n") +dprint("get rotation scalar = ", dataset.scalars.gets_to_tabular(["rotation"])) +dprint("get speed scalar = ", dataset.scalars.gets_to_tabular(["speed"]), end="\n\n") # Get specific scalars in tabular format -dprint("get specific scalars =", dataset.get_scalars_to_tabular(["speed", "rotation"])) -dprint("get all scalars =", dataset.get_scalars_to_tabular()) +dprint("get specific 
scalars =", dataset.scalars.gets_to_tabular(["speed", "rotation"])) +dprint("get all scalars =", dataset.scalars.gets_to_tabular()) # %% # Get specific scalars np.array -print("get all scalar arrays =", dataset.get_scalars_to_tabular(as_nparray=True)) +print("get all scalar arrays =", dataset.scalars.gets_to_tabular(as_nparray=True)) # %% [markdown] # ### Get Dataset fields @@ -288,8 +288,8 @@ def dprint(name: str, dictio: dict, end: str = "\n"): samples = [] for _ in range(nb_samples): sample = Sample() - sample.add_scalar("rotation", np.random.rand() + 1.0) - sample.add_scalar("random_name", np.random.rand() - 1.0) + sample.scalars.add("rotation", np.random.rand() + 1.0) + sample.scalars.add("random_name", np.random.rand() - 1.0) samples.append(sample) # Add a list of Samples @@ -305,7 +305,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): dataset.merge_dataset(other_dataset) print(f"after merge: {dataset = }", end="\n\n") -dprint("dataset scalars = ", dataset.get_scalars_to_tabular()) +dprint("dataset scalars = ", dataset.scalars.gets_to_tabular()) # %% [markdown] # ### Add tabular scalars to a Dataset @@ -316,7 +316,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): dataset.add_tabular_scalars(new_scalars, names=["Tu", "random_name"]) print(f"{dataset = }") -dprint("dataset scalars =", dataset.get_scalars_to_tabular()) +dprint("dataset scalars =", dataset.scalars.gets_to_tabular()) # %% [markdown] # ### Set additional information to a dataset diff --git a/examples/containers/sample_example.py b/examples/containers/sample_example.py index 76db906c..77d111e1 100644 --- a/examples/containers/sample_example.py +++ b/examples/containers/sample_example.py @@ -32,7 +32,7 @@ def show_sample(sample: Sample): print(f"{sample = }") sample.show_tree() - print(f"{sample.get_scalar_names() = }") + print(f"{sample.scalars.get_names() = }") print(f"{sample.get_field_names() = }") @@ -92,14 +92,14 @@ def show_sample(sample: Sample): # %% # Add a rotation scalar to this Sample -sample.add_scalar("rotation", np.random.randn()) +sample.scalars.add("rotation", np.random.randn()) show_sample(sample) # %% # Add a more scalars to this Sample -sample.add_scalar("speed", np.random.randn()) -sample.add_scalar("other", np.random.randn()) +sample.scalars.add("speed", np.random.randn()) +sample.scalars.add("other", np.random.randn()) show_sample(sample) @@ -223,9 +223,9 @@ def show_sample(sample: Sample): # %% # It will look for a default base if no base and zone are given -print(f"{sample.get_scalar_names() = }") -print(f"{sample.get_scalar('omega') = }") -print(f"{sample.get_scalar('rotation') = }") +print(f"{sample.scalars.get_names() = }") +print(f"{sample.scalars.get('omega') = }") +print(f"{sample.scalars.get('rotation') = }") # %% [markdown] # ### Access fields data in Sample @@ -551,7 +551,7 @@ def show_sample(sample: Sample): show_sample(new_sample) -new_sample.add_scalar("a", 2.1) +new_sample.scalars.add("a", 2.1) serialized_sample = new_sample.model_dump() unserialized_sample = Sample.model_validate(serialized_sample) diff --git a/examples/convert_users_data_example.py b/examples/convert_users_data_example.py index 70e54638..a01b7d5e 100644 --- a/examples/convert_users_data_example.py +++ b/examples/convert_users_data_example.py @@ -125,10 +125,10 @@ # Add random scalar values to the sample for sname in in_scalars_names: - sample.add_scalar(sname, np.random.randn()) + sample.scalars.add(sname, np.random.randn()) for sname in out_scalars_names: - sample.add_scalar(sname, 
np.random.randn()) + sample.scalars.add(sname, np.random.randn()) # Add random field values to the sample for j, sname in enumerate(out_fields_names): diff --git a/examples/pipelines/pipeline.py b/examples/pipelines/pipeline.py index 7ed63620..31719411 100644 --- a/examples/pipelines/pipeline.py +++ b/examples/pipelines/pipeline.py @@ -112,7 +112,7 @@ dataset_train = dataset_train.from_features_identifier(all_feature_id) print("dataset_train:", dataset_train) -print("scalar names =", dataset_train.get_scalar_names()) +print("scalar names =", dataset_train.scalars.get_names()) print("field names =", dataset_train.get_field_names()) @@ -143,7 +143,7 @@ preprocessed_dataset = preprocessor.fit_transform(dataset_train) print("preprocessed_dataset:", preprocessed_dataset) -print("scalar names =", preprocessed_dataset.get_scalar_names()) +print("scalar names =", preprocessed_dataset.scalars.get_names()) print("field names =", preprocessed_dataset.get_field_names()) diff --git a/examples/utils/init_with_tabular_example.py b/examples/utils/init_with_tabular_example.py index 6070fa07..070df75e 100644 --- a/examples/utils/init_with_tabular_example.py +++ b/examples/utils/init_with_tabular_example.py @@ -62,12 +62,12 @@ def dprint(name: str, dictio: dict): # %% # Access and display the value of a particular scalar within a sample -scalar_value = sample_1.get_scalar("scalar_0") +scalar_value = sample_1.scalars.get("scalar_0") print("Scalar 'scalar_0' in Sample 1:", scalar_value) # %% # Retrieve tabular data from the dataset based on scalar names scalar_names = ["scalar_1", "scalar_3", "scalar_5"] -tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names) +tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names) print("Tabular Data Subset for Scalars 1, 3, and 5:") dprint("tabular_data_subset", tabular_data_subset) diff --git a/examples/utils/stats_example.py b/examples/utils/stats_example.py index 1b032e6d..ed45b699 100644 --- a/examples/utils/stats_example.py +++ b/examples/utils/stats_example.py @@ -116,8 +116,8 @@ def sprint(stats: dict): spatial_shape_max = 5 # for sample in samples: - sample.add_scalar("test_scalar", np.random.randn()) - sample.add_scalar("test_ND_scalar", np.random.randn(3)) + sample.scalars.add("test_scalar", np.random.randn()) + sample.scalars.add("test_ND_scalar", np.random.randn(3)) sample.init_base(2, 3,) zone_shape = np.array([0, 0, 0]) sample.init_zone(zone_shape) @@ -149,7 +149,7 @@ def sprint(stats: dict): samples = [Sample() for _ in range(nb_samples)] for sample in samples: - sample.add_scalar("test_scalar", np.random.randn()) + sample.scalars.add("test_scalar", np.random.randn()) sample.init_base(2, 3,) zone_shape = np.array([0, 0, 0]) sample.init_zone(zone_shape) diff --git a/src/plaid/containers/collections.py b/src/plaid/containers/collections.py index 51df3f63..bf1d76d8 100644 --- a/src/plaid/containers/collections.py +++ b/src/plaid/containers/collections.py @@ -1,15 +1,9 @@ -"""Module for implementing collections.""" +"""Module for implementing collections of features within a Sample.""" import logging from typing import Optional, Union -import CGNS.PAT.cgnskeywords as CGK -import CGNS.PAT.cgnslib as CGL -import CGNS.PAT.cgnsutils as CGU -import numpy as np - -from plaid.types import CGNSNode, CGNSTree, Field, Scalar -from plaid.utils import cgns_helper as CGH +from plaid.types import Scalar logger = logging.getLogger(__name__) logging.basicConfig( @@ -36,11 +30,15 @@ def _check_names(names: Union[str, list[str]]): ) -class ScalarCollection: 
- """Manager object for scalars.""" +class SampleScalars: + """A container for scalar features within a Sample. + + Provides dict-like operations for adding, retrieving, and removing scalars. + Names must be unique and may not contain the character ``/``. + """ - def __init__(self): - self.features: dict[str, Scalar] = {} + def __init__(self, scalars: Optional[dict[str, Scalar]]) -> None: + self._scalars: dict[str, Scalar] = scalars if scalars is not None else {} def add(self, name: str, value: Scalar) -> None: """Add a scalar value to a dictionary. @@ -49,11 +47,8 @@ def add(self, name: str, value: Scalar) -> None: name (str): The name of the scalar value. value (Scalar): The scalar value to add or update in the dictionary. """ - _check_names([name]) - if self._scalars is None: - self._scalars = {name: value} - else: - self._scalars[name] = value + _check_names(name) + self._scalars[name] = value def remove(self, name: str) -> Scalar: """Delete a scalar value from the dictionary. @@ -67,15 +62,12 @@ def remove(self, name: str) -> Scalar: Returns: Scalar: The value of the deleted scalar. """ - if self._scalars is None: - raise KeyError("There is no scalar inside this sample.") - if name not in self._scalars: raise KeyError(f"There is no scalar value with name {name}.") return self._scalars.pop(name) - def get(self, name: str) -> Scalar: + def get(self, name: str) -> Scalar | None: """Retrieve a scalar value associated with the given name. Args: @@ -84,499 +76,12 @@ def get(self, name: str) -> Scalar: Returns: Scalar or None: The scalar value associated with the given name, or None if the name is not found. """ - if (self._scalars is None) or (name not in self._scalars): - return None - else: - return self._scalars[name] + return self._scalars.get(name) def get_names(self) -> list[str]: """Get a set of scalar names available in the object. Returns: - set[str]: A set containing the names of the available scalars. - """ - if self._scalars is None: - return [] - else: - res = sorted(self._scalars.keys()) - return res - - -class FieldCollection: - """Manager object for fields.""" - - def __init__(self): - self.features: dict[str, Field] = {} - self._defaults: dict = { - "active_base": None, - "active_zone": None, - "active_time": None, - } - - def add( - self, - name: str, - field: Field, - zone_name: Optional[str] = None, - base_name: Optional[str] = None, - location: str = "Vertex", - time: Optional[float] = None, - warning_overwrite=True, - ) -> None: - """Add a field to a specified zone in the grid. - - Args: - name (str): The name of the field to be added. - field (Field): The field data to be added. - zone_name (str, optional): The name of the zone where the field will be added. Defaults to None. - base_name (str, optional): The name of the base where the zone is located. Defaults to None. - location (str, optional): The grid location where the field will be stored. Defaults to 'Vertex'. - Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` - time (float, optional): The time associated with the field. Defaults to 0. - warning_overwrite (bool, optional): Show warning if an preexisting field is being overwritten - - Raises: - KeyError: Raised if the specified zone does not exist in the given base. 
- """ - _check_names([name]) - # init_tree will look for default time - self.init_tree(time) - - # get_zone will look for default zone_name, base_name and time - zone_node = self.get_zone(zone_name, base_name, time) - - if zone_node is None: - raise KeyError( - f"there is no Zone with name {zone_name} in base {base_name}. Did you check topological and physical dimensions ?" - ) - - # solution_paths = CGU.getPathsByTypeOrNameList(self._tree, '/.*/.*/FlowSolution_t') - solution_paths = CGU.getPathsByTypeSet(zone_node, "FlowSolution_t") - has_FlowSolution_with_location = False - if len(solution_paths) > 0: - for s_path in solution_paths: - val_location = ( - CGU.getValueByPath(zone_node, f"{s_path}/GridLocation") - .tobytes() - .decode() - ) - if val_location == location: - has_FlowSolution_with_location = True - - if not (has_FlowSolution_with_location): - CGL.newFlowSolution(zone_node, f"{location}Fields", gridlocation=location) - - solution_paths = CGU.getPathsByTypeSet(zone_node, "FlowSolution_t") - assert len(solution_paths) > 0 - - for s_path in solution_paths: - val_location = ( - CGU.getValueByPath(zone_node, f"{s_path}/GridLocation") - .tobytes() - .decode() - ) - - if val_location != location: - continue - - field_node = CGU.getNodeByPath(zone_node, f"{s_path}/{name}") - - if field_node is None: - flow_solution_node = CGU.getNodeByPath(zone_node, s_path) - CGL.newDataArray(flow_solution_node, name, np.asfortranarray(field)) - else: - if warning_overwrite: - logger.warning( - f"field node with name {name} already exists -> data will be replaced" - ) - CGU.setValue(field_node, np.asfortranarray(field)) - - def remove( - self, - name: str, - zone_name: Optional[str] = None, - base_name: Optional[str] = None, - location: str = "Vertex", - time: Optional[float] = None, - ) -> CGNSTree: - """Delete a field from a specified zone in the grid. - - Args: - name (str): The name of the field to be deleted. - zone_name (str, optional): The name of the zone from which the field will be deleted. Defaults to None. - base_name (str, optional): The name of the base where the zone is located. Defaults to None. - location (str, optional): The grid location where the field is stored. Defaults to 'Vertex'. - Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS` - time (float, optional): The time associated with the field. Defaults to 0. - - Raises: - KeyError: Raised if the specified zone or field does not exist in the given base. - - Returns: - CGNSTree: The tree at the provided time (without the deleted node) - """ - # get_zone will look for default zone_name, base_name, and time - zone_node = self.get_zone(zone_name, base_name, time) - time = self.get_time_assignment(time) - mesh_tree = self._meshes[time] - - if zone_node is None: - raise KeyError( - f"There is no Zone with name {zone_name} in base {base_name}." 
-            )
-
-        solution_paths = CGU.getPathsByTypeSet(zone_node, [CGK.FlowSolution_t])
-
-        updated_tree = None
-        for s_path in solution_paths:
-            if (
-                CGU.getValueByPath(zone_node, f"{s_path}/GridLocation")
-                .tobytes()
-                .decode()
-                == location
-            ):
-                field_node = CGU.getNodeByPath(zone_node, f"{s_path}/{name}")
-                if field_node is not None:
-                    updated_tree = CGU.nodeDelete(mesh_tree, field_node)
-
-        # If the function reaches here, the field was not found
-        if updated_tree is None:
-            raise KeyError(f"There is no field with name {name} in the specified zone.")
-
-        return updated_tree
-
-    def get(
-        self,
-        name: str,
-        zone_name: Optional[str] = None,
-        base_name: Optional[str] = None,
-        location: str = "Vertex",
-        time: Optional[float] = None,
-    ) -> Field:
-        """Retrieve a field with a specified name from a given zone, base, location, and time.
-
-        Args:
-            name (str): The name of the field to retrieve.
-            zone_name (str, optional): The name of the zone to search for. Defaults to None.
-            base_name (str, optional): The name of the base to search for. Defaults to None.
-            location (str, optional): The location at which to retrieve the field. Defaults to 'Vertex'.
-                Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS`
-            time (float, optional): The time value to consider when searching for the field. If a specific time is not provided, the method will display the tree structure for the default time step.
-
-        Returns:
-            Field: A set containing the names of the fields that match the specified criteria.
-        """
-        # get_zone will look for default time
-        search_node = self.get_zone(zone_name, base_name, time)
-        if search_node is None:
-            return None
-
-        is_empty = True
-        full_field = []
-
-        solution_paths = CGU.getPathsByTypeSet(search_node, [CGK.FlowSolution_t])
-
-        for f_path in solution_paths:
-            if (
-                CGU.getValueByPath(search_node, f_path + "/GridLocation")
-                .tobytes()
-                .decode()
-                == location
-            ):
-                field = CGU.getValueByPath(search_node, f_path + "/" + name)
-
-                if field is None:
-                    field = np.empty((0,))
-                else:
-                    is_empty = False
-                full_field.append(field)
-
-        if is_empty:
-            return None
-        else:
-            return np.concatenate(full_field)
-
-    def get_names(
-        self,
-        zone_name: Optional[str] = None,
-        base_name: Optional[str] = None,
-        location: str = "Vertex",
-        time: Optional[float] = None,
-    ) -> list[str]:
-        """Get a set of field names associated with a specified zone, base, location, and time.
-
-        Args:
-            zone_name (str, optional): The name of the zone to search for. Defaults to None.
-            base_name (str, optional): The name of the base to search for. Defaults to None.
-            location (str, optional): The desired grid location where the field is defined. Defaults to 'Vertex'.
-                Possible values : :py:const:`plaid.constants.CGNS_FIELD_LOCATIONS`
-            time (float, optional): The specific time at which to retrieve field names. If a specific time is not provided, the method will display the tree structure for the default time step.
-
-        Returns:
-            set[str]: A set containing the names of the fields that match the specified criteria.
+            list[str]: A sorted list of the names of the available scalars.
""" - - def get_field_names_one_base(base_name: str) -> list[str]: - # get_zone will look for default zone_name, base_name, time - search_node = self.get_zone(zone_name, base_name, time) - if search_node is None: # pragma: no cover - return [] - - names = [] - solution_paths = CGU.getPathsByTypeSet(search_node, [CGK.FlowSolution_t]) - for f_path in solution_paths: - if ( - CGU.getValueByPath(search_node, f_path + "/GridLocation") - .tobytes() - .decode() - != location - ): - continue - f_node = CGU.getNodeByPath(search_node, f_path) - for path in CGU.getPathByTypeFilter(f_node, CGK.DataArray_t): - field_name = path.split("/")[-1] - if not (field_name == "GridLocation"): - names.append(field_name) - return names - - if base_name is None: - # get_base_names will look for default time - base_names = self.get_base_names(time=time) - else: - base_names = [base_name] - - all_names = [] - for bn in base_names: - all_names += get_field_names_one_base(bn) - - all_names.sort() - all_names = list(set(all_names)) - - return all_names - - def init_tree(self, time: float = None) -> CGNSTree: - """Initialize a CGNS tree structure at a specified time step or create a new one if it doesn't exist. - - Args: - time (float, optional): The time step for which to initialize the CGNS tree structure. If a specific time is not provided, the method will display the tree structure for the default time step. - - Returns: - CGNSTree (list): The initialized or existing CGNS tree structure for the specified time step. - """ - time = self.get_time_assignment(time) - - if self._meshes is None: - self._meshes = {time: CGL.newCGNSTree()} - self._links = {time: None} - self._paths = {time: None} - elif time not in self._meshes: - self._meshes[time] = CGL.newCGNSTree() - self._links[time] = None - self._paths[time] = None - - return self._meshes[time] - - def get_zone( - self, zone_name: str = None, base_name: str = None, time: float = None - ) -> CGNSNode: - """Retrieve a CGNS Zone node by its name within a specific Base and time. - - Args: - zone_name (str, optional): The name of the Zone node to retrieve. If not specified, checks that there is **at most** one zone in the base, else raises an error. Defaults to None. - base_name (str, optional): The Base in which to seek to zone retrieve. If not specified, checks that there is **at most** one base, else raises an error. Defaults to None. - time (float, optional): Time at which you want to retrieve the Zone node. - - Returns: - CGNSNode: Returns a CGNS Zone node if found; otherwise, returns None. - """ - # get_base will look for default base_name and time - base_node = self.get_base(base_name, time) - if base_node is None: - logger.warning(f"No base with name {base_name} and this tree") - return None - - # _zone_attribution will look for default base_name - zone_name = self.get_zone_assignment(zone_name, base_name, time) - if zone_name is None: - logger.warning(f"No zone with name {zone_name} and this base ({base_name})") - return None - - return CGU.getNodeByPath(base_node, zone_name) - - def get_time_assignment(self, time: float = None) -> float: - """Retrieve the default time for the CGNS operations. - - If there are available time steps, it will return the first one; otherwise, it will return 0.0. - - Args: - time (str, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. - - Returns: - float: The attributed time. - - Note: - - The default time step is used as a reference point for many CGNS operations. 
- - It is important for accessing and visualizing data at specific time points in a simulation. - """ - if self._defaults["active_time"] is None and time is None: - timestamps = self.get_all_mesh_times() - return sorted(timestamps)[0] if len(timestamps) > 0 else 0.0 - return self._defaults["active_time"] if time is None else time - - def get_base_names( - self, full_path: bool = False, unique: bool = False, time: float = None - ) -> list[str]: - """Return Base names. - - Args: - full_path (bool, optional): If True, returns full paths instead of only Base names. Defaults to False. - unique (bool, optional): If True, returns unique names instead of potentially duplicated names. Defaults to False. - time (float, optional): The time at which to check for the Base. If a specific time is not provided, the method will display the tree structure for the default time step. - - Returns: - list[str]: - """ - time = self.get_time_assignment(time) - - if self._meshes is not None: - if self._meshes[time] is not None: - return CGH.get_base_names( - self._meshes[time], full_path=full_path, unique=unique - ) - else: - return [] - - def get_zone_assignment( - self, zone_name: str = None, base_name: str = None, time: float = None - ) -> str: - """Retrieve the default zone name for the CGNS operations. - - This function calculates the attributed zone for a specific operation based on the - default zone set in the system, within the specified base. - - Args: - zone_name (str, optional): The name of the zone to attribute the operation to. If not provided, the default zone set in the system within the specified base will be used. - base_name (str, optional): The name of the base within which the zone should be attributed. If not provided, the default base set in the system will be used. - time (str, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. - - Raises: - KeyError: If no default zone can be determined based on the provided or default values. - KeyError: If no zone node is found after following given and default parameters. - - Returns: - str: The attributed zone name. - - Note: - - If neither a specific zone name nor a specific base name is provided, the function will use the default zone provided by the user. - - In case the default zone does not exist: If no specific time is provided, the function will use the default time provided by the user. - """ - zone_name = zone_name or self._defaults.get("active_zone") - - if zone_name: - return zone_name - - base_name = self.get_base_assignment(base_name, time) - zone_names = self.get_zone_names(base_name, time=time) - if len(zone_names) == 0: - return None - elif len(zone_names) == 1: - # logging.info(f"No default zone provided. Taking the only zone available: {zone_names[0]} in default base: {base_name}") - return zone_names[0] - - raise KeyError( - f"No default zone provided among {zone_names} in the default base: {base_name}" - ) - - def get_base(self, base_name: str = None, time: float = None) -> CGNSNode: - """Return Base node named `base_name`. - - If `base_name` is not specified, checks that there is **at most** one base, else raises an error. - - Args: - base_name (str, optional): The name of the Base node to retrieve. Defaults to None. Defaults to None. - time (float, optional): Time at which you want to retrieve the Base node. If a specific time is not provided, the method will display the tree structure for the default time step. 
- - Returns: - CGNSNode or None: The Base node with the specified name or None if it is not found. - """ - time = self.get_time_assignment(time) - base_name = self.get_base_assignment(base_name, time) - - if (self._meshes is None) or (self._meshes[time] is None): - logger.warning(f"No base with name {base_name} and this tree") - return None - - return CGU.getNodeByPath(self._meshes[time], f"/CGNSTree/{base_name}") - - def get_base_assignment(self, base_name: str = None, time: float = None) -> str: - """Retrieve the default base name for the CGNS operations. - - This function calculates the attributed base for a specific operation based on the - default base set in the system. - - Args: - base_name (str, optional): The name of the base to attribute the operation to. If not provided, the default base set in the system will be used. - time (str, optional): The time value provided for the operation. If not provided, the default time set in the system will be used. - - Raises: - KeyError: If no default base can be determined based on the provided or default. - KeyError: If no base node is found after following given and default parameters. - - Returns: - str: The attributed base name. - - Note: - - If no specific base name is provided, the function will use the default base provided by the user. - - In case the default base does not exist: If no specific time is provided, the function will use the default time provided by the user. - """ - base_name = base_name or self._defaults.get("active_base") - - if base_name: - return base_name - - base_names = self.get_base_names(time=time) - if len(base_names) == 0: - return None - elif len(base_names) == 1: - # logging.info(f"No default base provided. Taking the only base available: {base_names[0]}") - return base_names[0] - - raise KeyError(f"No default base provided among {base_names}") - - def get_zone_names( - self, - base_name: str = None, - full_path: bool = False, - unique: bool = False, - time: float = None, - ) -> list[str]: - """Return list of Zone names in Base named `base_name` with specific time. - - Args: - base_name (str, optional): Name of Base where to search Zones. If not specified, checks if there is at most one Base. Defaults to None. - full_path (bool, optional): If True, returns full paths instead of only Zone names. Defaults to False. - unique (bool, optional): If True, returns unique names instead of potentially duplicated names. Defaults to False. - time (float, optional): The time at which to check for the Zone. If a specific time is not provided, the method will display the tree structure for the default time step. - - Returns: - list[str]: List of Zone names in Base named `base_name`, empty if there is none or if the Base doesn't exist. 
- """ - zone_paths = [] - - # get_base will look for default base_name and time - base_node = self.get_base(base_name, time) - if base_node is not None: - z_paths = CGU.getPathsByTypeSet(base_node, "CGNSZone_t") - for pth in z_paths: - s_pth = pth.split("/") - assert len(s_pth) == 2 - assert s_pth[0] == base_name or base_name is None - if full_path: - zone_paths.append(pth) - else: - zone_paths.append(s_pth[1]) - - if unique: - return list(set(zone_paths)) - else: - return zone_paths + return sorted(self._scalars.keys()) diff --git a/src/plaid/containers/dataset.py b/src/plaid/containers/dataset.py index 0fdcaf90..b1d3e7d6 100644 --- a/src/plaid/containers/dataset.py +++ b/src/plaid/containers/dataset.py @@ -365,7 +365,7 @@ def get_scalar_names(self, ids: Optional[list[int]] = None) -> list[str]: scalars_names = [] for sample in self.get_samples(ids, as_list=True): - s_names = sample.get_scalar_names() + s_names = sample.scalars.get_names() for s_name in s_names: if s_name not in scalars_names: scalars_names.append(s_name) @@ -511,7 +511,7 @@ def add_tabular_scalars( for i_samp in range(nb_samples): sample = Sample() for name in names: - sample.add_scalar(name, name_to_ids[name][i_samp]) + sample.scalars.add(name, name_to_ids[name][i_samp]) self.add_sample(sample) def get_scalars_to_tabular( @@ -547,7 +547,7 @@ def get_scalars_to_tabular( res = np.empty(nb_samples) res.fill(None) for i_, id in enumerate(sample_ids): - val = self[id].get_scalar(s_name) + val = self[id].scalars.get(s_name) if val is not None: res[i_] = val named_tabular[s_name] = res diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py index 0e0b45a2..3787e842 100644 --- a/src/plaid/containers/sample.py +++ b/src/plaid/containers/sample.py @@ -38,6 +38,7 @@ CGNS_ELEMENT_NAMES, CGNS_FIELD_LOCATIONS, ) +from plaid.containers.collections import SampleScalars, _check_names from plaid.containers.utils import get_feature_type_and_details_from from plaid.types import ( CGNSNode, @@ -61,24 +62,6 @@ ) -def _check_names(names: Union[str, list[str]]): - """Check that names do not contain invalid character ``/``. - - Args: - names (Union[str, list[str]]): The names to check. - - Raises: - ValueError: If any name contains the invalid character ``/``. - """ - if isinstance(names, str): - names = [names] - for name in names: - if (name is not None) and ("/" in name): - raise ValueError( - f"feature_names containing `/` are not allowed, but {name=}, you should first replace any occurence of `/` with something else, for example: `name.replace('/','__')`" - ) - - def _read_index(pyTree: list, dim: list[int]): """Read Index Array or Index Range from CGNS. @@ -161,8 +144,11 @@ def __init__( directory_path: Optional[Union[str, Path]] = None, mesh_base_name: str = "Base", mesh_zone_name: str = "Zone", - links: dict[float, list[LinkType]] = None, - paths: dict[float, list[PathType]] = None, + meshes: Optional[dict[float, CGNSTree]] = None, + scalars: Optional[dict[str, Scalar]] = None, + time_series: Optional[dict[str, TimeSeries]] = None, + links: Optional[dict[float, list[LinkType]]] = None, + paths: Optional[dict[float, list[PathType]]] = None, ) -> None: """Initialize an empty :class:`Sample `. 
@@ -199,18 +185,18 @@ def __init__( self._mesh_base_name: str = mesh_base_name self._mesh_zone_name: str = mesh_zone_name - self._meshes: dict[float, CGNSTree] = {} - self._scalars: dict[str, Scalar] = {} - self._time_series: dict[str, TimeSeries] = {} + self._meshes: dict[float, CGNSTree] | None = meshes + self._scalars = SampleScalars(scalars) + self._time_series: dict[str, TimeSeries] | None = time_series - self._links: dict[float, list[LinkType]] = links - self._paths: dict[float, list[PathType]] = paths + self._links: dict[float, list[LinkType]] | None = links + self._paths: dict[float, list[PathType]] | None = paths if directory_path is not None: directory_path = Path(directory_path) self.load(directory_path) - self._defaults: dict = { + self._defaults: dict[str, Optional[str]] = { "active_base": None, "active_zone": None, "active_time": None, @@ -218,6 +204,10 @@ def __init__( self._extra_data = None + @property + def scalars(self) -> SampleScalars: + return self._scalars + def copy(self) -> Self: """Create a deep copy of the sample. @@ -1142,66 +1132,6 @@ def get_zone_type( ) return CGU.getValueByPath(zone_node, "ZoneType").tobytes().decode() - # -------------------------------------------------------------------------# - def get_scalar_names(self) -> set[str]: - """Get a set of scalar names available in the object. - - Returns: - set[str]: A set containing the names of the available scalars. - """ - if self._scalars is None: - return [] - else: - res = sorted(self._scalars.keys()) - return res - - def get_scalar(self, name: str) -> Scalar: - """Retrieve a scalar value associated with the given name. - - Args: - name (str): The name of the scalar value to retrieve. - - Returns: - Scalar or None: The scalar value associated with the given name, or None if the name is not found. - """ - if (self._scalars is None) or (name not in self._scalars): - return None - else: - return self._scalars[name] - - def add_scalar(self, name: str, value: Scalar) -> None: - """Add a scalar value to a dictionary. - - Args: - name (str): The name of the scalar value. - value (Scalar): The scalar value to add or update in the dictionary. - """ - _check_names([name]) - if self._scalars is None: - self._scalars = {name: value} - else: - self._scalars[name] = value - - def del_scalar(self, name: str) -> Scalar: - """Delete a scalar value from the dictionary. - - Args: - name (str): The name of the scalar value to be deleted. - - Raises: - KeyError: Raised when there is no scalar / there is no scalar with the provided name. - - Returns: - Scalar: The value of the deleted scalar. - """ - if self._scalars is None: - raise KeyError("There is no scalar inside this sample.") - - if name not in self._scalars: - raise KeyError(f"There is no scalar value with name {name}.") - - return self._scalars.pop(name) - # -------------------------------------------------------------------------# def get_time_series_names(self) -> set[str]: """Get the names of time series associated with the object. @@ -1770,7 +1700,7 @@ def get_all_features_identifiers( list[FeatureIdentifier]: A list of dictionaries containing the identifiers of all features in the sample. 
""" all_features_identifiers = [] - for sn in self.get_scalar_names(): + for sn in self.scalars.get_names(): all_features_identifiers.append({"type": "scalar", "name": sn}) for tsn in self.get_time_series_names(): all_features_identifiers.append({"type": "time_series", "name": tsn}) @@ -1830,7 +1760,7 @@ def get_feature_from_string_identifier( ":://.../" Supported feature types: - - "scalar": expects 1 detail → `get_scalar(name)` + - "scalar": expects 1 detail → `scalars.get(name)` - "time_series": expects 1 detail → `get_time_series(name)` - "field": up to 5 details → `get_field(name, base_name, zone_name, location, time)` - "nodes": up to 3 details → `get_nodes(base_name, zone_name, time)` @@ -1861,7 +1791,10 @@ def get_feature_from_string_identifier( arg_names = AUTHORIZED_FEATURE_INFOS[feature_type] if feature_type == "scalar": - return self.get_scalar(feature_details[0]) + val = self.scalars.get(feature_details[0]) + if val is None: + raise KeyError(f"Unknown scalar {feature_details[0]}") + return val elif feature_type == "time_series": return self.get_time_series(feature_details[0]) elif feature_type == "field": @@ -1881,7 +1814,7 @@ def get_feature_from_identifier( """Retrieve a feature object based on a structured identifier dictionary. The `feature_identifier` must include a `"type"` key specifying the feature kind: - - `"scalar"` → calls `get_scalar(name)` + - `"scalar"` → calls `scalars.get(name)` - `"time_series"` → calls `get_time_series(name)` - `"field"` → calls `get_field(name, base_name, zone_name, location, time)` - `"nodes"` → calls `get_nodes(base_name, zone_name, time)` @@ -1907,7 +1840,7 @@ def get_feature_from_identifier( ) if feature_type == "scalar": - return self.get_scalar(**feature_details) + return self.scalars.get(**feature_details) elif feature_type == "time_series": return self.get_time_series(**feature_details) elif feature_type == "field": @@ -1921,7 +1854,7 @@ def get_features_from_identifiers( """Retrieve features based on a list of structured identifier dictionaries. 
Elements of `feature_identifiers` must include a `"type"` key specifying the feature kind: - - `"scalar"` → calls `get_scalar(name)` + - `"scalar"` → calls `scalars.get(name)` - `"time_series"` → calls `get_time_series(name)` - `"field"` → calls `get_field(name, base_name, zone_name, location, time)` - `"nodes"` → calls `get_nodes(base_name, zone_name, time)` @@ -1950,7 +1883,7 @@ def get_features_from_identifiers( features = [] for feature_type, feature_details in all_features_info: if feature_type == "scalar": - features.append(self.get_scalar(**feature_details)) + features.append(self.scalars.get(**feature_details)) elif feature_type == "time_series": features.append(self.get_time_series(**feature_details)) elif feature_type == "field": @@ -1986,7 +1919,7 @@ def _add_feature( if feature_type == "scalar": if safe_len(feature) == 1: feature = feature[0] - self.add_scalar(**feature_details, value=feature) + self.scalars.add(**feature_details, value=feature) elif feature_type == "time_series": self.add_time_series( **feature_details, time_sequence=feature[0], values=feature[1] @@ -2168,11 +2101,11 @@ def save(self, dir_path: Union[str, Path], overwrite: bool = False) -> None: ) logger.debug(f"save -> {status=}") - scalars_names = self.get_scalar_names() + scalars_names = self.scalars.get_names() if len(scalars_names) > 0: scalars = [] for s_name in scalars_names: - scalars.append(self.get_scalar(s_name)) + scalars.append(self.scalars.get(s_name)) scalars = np.array(scalars).reshape((1, -1)) header = ",".join(scalars_names) np.savetxt( @@ -2281,7 +2214,7 @@ def load(self, dir_path: Union[str, Path]) -> None: scalars_fname, dtype=float, skiprows=1, delimiter="," ).reshape((-1,)) for name, value in zip(names, scalars): - self.add_scalar(name, value) + self.scalars.add(name, value) time_series_files = list(dir_path.glob("time_series_*.csv")) for ts_fname in time_series_files: @@ -2303,7 +2236,7 @@ def __str__(self) -> str: str_repr = "Sample(" # scalars - nb_scalars = len(self.get_scalar_names()) + nb_scalars = len(self.scalars.get_names()) str_repr += f"{nb_scalars} scalar{'' if nb_scalars == 1 else 's'}, " # time series @@ -2357,7 +2290,7 @@ def serialize_model(self): "mesh_base_name": self._mesh_base_name, "mesh_zone_name": self._mesh_zone_name, "meshes": self._meshes, - "scalars": self._scalars, + "scalars": self.scalars._scalars, "time_series": self._time_series, "links": self._links, "paths": self._paths, diff --git a/src/plaid/post/bisect.py b/src/plaid/post/bisect.py index 4d195260..b73e4378 100644 --- a/src/plaid/post/bisect.py +++ b/src/plaid/post/bisect.py @@ -48,10 +48,10 @@ def prepare_datasets( for i_sample in tqdm(range(n_samples), disable=not (verbose)): for sname in out_scalars_names: - ref = ref_dataset[i_sample].get_scalar(sname) + ref = ref_dataset[i_sample].scalars.get(sname) ref_out_scalars[sname].append(ref) - pred = pred_dataset[i_sample].get_scalar(sname) + pred = pred_dataset[i_sample].scalars.get(sname) pred_out_scalars[sname].append(pred) return ref_out_scalars, pred_out_scalars, out_scalars_names diff --git a/src/plaid/problem_definition.py b/src/plaid/problem_definition.py index fbca9fbd..3db4d6dc 100644 --- a/src/plaid/problem_definition.py +++ b/src/plaid/problem_definition.py @@ -866,7 +866,7 @@ def get_all_indices(self) -> list[int]: # """ # res = {} # for _,feature_name in self.get_input_scalars_names(feature_type='scalar'): - # res.update(self.get_scalars_to_tabular(feature_name, sample_ids)) + # res.update(self.scalars.gets_to_tabular(feature_name, 
sample_ids))
 #         if as_dataframe:
 #             res = pandas.DataFrame(res)
@@ -882,7 +882,7 @@ def get_all_indices(self) -> list[int]:
 #         """
 #         res = {}
 #         for _,feature_name in self.get_output_scalars_names(feature_type='scalar'):
-#             res.update(self.get_scalars_to_tabular(feature_name, sample_ids))
+#             res.update(self.scalars.gets_to_tabular(feature_name, sample_ids))
 #         if as_dataframe:
 #             res = pandas.DataFrame(res)
diff --git a/src/plaid/utils/init_with_tabular.py b/src/plaid/utils/init_with_tabular.py
index ce766457..48cf9838 100644
--- a/src/plaid/utils/init_with_tabular.py
+++ b/src/plaid/utils/init_with_tabular.py
@@ -62,7 +62,7 @@ def initialize_dataset_with_tabular_data(
     for i in range(nb_samples):
         sample = Sample()
         for scalar_name, value in tabular_data.items():
-            sample.add_scalar(scalar_name, value[i])
+            sample.scalars.add(scalar_name, value[i])
         dataset.add_sample(sample)
 
     # TODO:
diff --git a/src/plaid/utils/stats.py b/src/plaid/utils/stats.py
index 56ddc626..a1b2ed5b 100644
--- a/src/plaid/utils/stats.py
+++ b/src/plaid/utils/stats.py
@@ -376,10 +376,10 @@ def _process_scalar_data(self, sample: Sample, data_dict: dict[str, list]) -> No
             sample (Sample): Sample containing scalar data
             data_dict (dict[str, list]): Dictionary to store processed data
         """
-        for name in sample.get_scalar_names():
+        for name in sample.scalars.get_names():
             if name not in data_dict:
                 data_dict[name] = []
-            value = sample.get_scalar(name)
+            value = sample.scalars.get(name)
             if value is not None:
                 data_dict[name].append(np.array(value).reshape((1, -1)))
diff --git a/tests/bridges/test_huggingface_bridge.py b/tests/bridges/test_huggingface_bridge.py
index d54161cd..2422b21d 100644
--- a/tests/bridges/test_huggingface_bridge.py
+++ b/tests/bridges/test_huggingface_bridge.py
@@ -70,7 +70,7 @@ def assert_plaid_dataset(self, ds, pbdef):
 
     def assert_sample(self, sample):
         assert isinstance(sample, Sample)
-        assert sample.get_scalar_names()[0] == "test_scalar"
+        assert sample.scalars.get_names()[0] == "test_scalar"
         assert "test_field_same_size" in sample.get_field_names()
         assert sample.get_field("test_field_same_size").shape[0] == 17
diff --git a/tests/conftest.py b/tests/conftest.py
index c8fb9bc7..ab372d29 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -29,8 +29,8 @@ def generate_samples(nb: int, zone_name: str, base_name: str) -> list[Sample]:
         sample = Sample()
         sample.init_base(3, 3, base_name)
         sample.init_zone(np.array([0, 0, 0]), zone_name=zone_name, base_name=base_name)
-        sample.add_scalar("test_scalar", float(i))
-        sample.add_scalar("test_scalar_2", float(i**2))
+        sample.scalars.add("test_scalar", float(i))
+        sample.scalars.add("test_scalar_2", float(i**2))
         sample.add_time_series(
             "test_time_series_1",
             np.arange(11, dtype=float),
diff --git a/tests/containers/test_dataset.py b/tests/containers/test_dataset.py
index 8930509e..6c06fc08 100644
--- a/tests/containers/test_dataset.py
+++ b/tests/containers/test_dataset.py
@@ -31,7 +31,7 @@ def current_directory():
 
 def compare_two_samples(sample_1: Sample, sample_2: Sample):
     assert set(sample_1.get_all_mesh_times()) == set(sample_2.get_all_mesh_times())
-    assert set(sample_1.get_scalar_names()) == set(sample_2.get_scalar_names())
+    assert set(sample_1.scalars.get_names()) == set(sample_2.scalars.get_names())
     assert set(sample_1.get_field_names()) == set(sample_2.get_field_names())
     assert set(sample_1.get_time_series_names()) == set(
         sample_2.get_time_series_names()
diff --git a/tests/containers/test_sample.py b/tests/containers/test_sample.py
index fc83a882..5eb61d35 100644
--- a/tests/containers/test_sample.py
+++ b/tests/containers/test_sample.py
@@ -19,9 +19,9 @@
 from plaid.containers.sample import (
     Sample,
     _check_names,
-    read_index,
-    read_index_array,
-    read_index_range,
+    _read_index,
+    _read_index_array,
+    _read_index_range,
 )
 
 # %% Fixtures
@@ -49,7 +49,7 @@ def other_sample():
 
 @pytest.fixture()
 def sample_with_scalar(sample):
-    sample.add_scalar("test_scalar_1", np.random.randn())
+    sample.scalars.add("test_scalar_1", np.random.randn())
     return sample
 
@@ -105,8 +105,8 @@ def sample_with_tree3d(sample, tree3d):
 def sample_with_tree_and_scalar_and_time_series(
     sample_with_tree,
 ):
-    sample_with_tree.add_scalar("r", np.random.randn())
-    sample_with_tree.add_scalar("test_scalar_1", np.random.randn())
+    sample_with_tree.scalars.add("r", np.random.randn())
+    sample_with_tree.scalars.add("test_scalar_1", np.random.randn())
     sample_with_tree.add_time_series(
         "test_time_series_1", np.arange(111, dtype=float), np.random.randn(111)
     )
@@ -128,15 +128,15 @@ def test_check_names():
 
 
 def test_read_index(tree, physical_dim):
-    read_index(tree, physical_dim)
+    _read_index(tree, physical_dim)
 
 
 def test_read_index_array(tree):
-    read_index_array(tree)
+    _read_index_array(tree)
 
 
 def test_read_index_range(tree, physical_dim):
-    read_index_range(tree, physical_dim)
+    _read_index_range(tree, physical_dim)
 
 
 @pytest.fixture()
@@ -596,43 +596,43 @@ def test_get_zone(self, sample, zone_name, base_name):
     # -------------------------------------------------------------------------#
     def test_get_scalar_names(self, sample):
-        assert sample.get_scalar_names() == []
+        assert sample.scalars.get_names() == []
 
     def test_get_scalar_empty(self, sample):
-        assert sample.get_scalar("missing_scalar_name") is None
+        assert sample.scalars.get("missing_scalar_name") is None
 
     def test_get_scalar(self, sample_with_scalar):
-        assert sample_with_scalar.get_scalar("missing_scalar_name") is None
-        assert sample_with_scalar.get_scalar("test_scalar_1") is not None
+        assert sample_with_scalar.scalars.get("missing_scalar_name") is None
+        assert sample_with_scalar.scalars.get("test_scalar_1") is not None
 
-    def test_add_scalar_empty(self, sample_with_scalar):
-        assert isinstance(sample_with_scalar.get_scalar("test_scalar_1"), float)
+    def test_scalars_add_empty(self, sample_with_scalar):
+        assert isinstance(sample_with_scalar.scalars.get("test_scalar_1"), float)
 
-    def test_add_scalar(self, sample_with_scalar):
-        sample_with_scalar.add_scalar("test_scalar_2", np.random.randn())
+    def test_scalars_add(self, sample_with_scalar):
+        sample_with_scalar.scalars.add("test_scalar_2", np.random.randn())
 
     def test_del_scalar_unknown_scalar(self, sample_with_scalar):
         with pytest.raises(KeyError):
-            sample_with_scalar.del_scalar("non_existent_scalar")
+            sample_with_scalar.scalars.remove("non_existent_scalar")
 
     def test_del_scalar_no_scalar(self):
         sample = Sample()
         with pytest.raises(KeyError):
-            sample.del_scalar("non_existent_scalar")
+            sample.scalars.remove("non_existent_scalar")
 
     def test_del_scalar(self, sample_with_scalar):
-        assert len(sample_with_scalar.get_scalar_names()) == 1
+        assert len(sample_with_scalar.scalars.get_names()) == 1
 
-        sample_with_scalar.add_scalar("test_scalar_2", np.random.randn(5))
-        assert len(sample_with_scalar.get_scalar_names()) == 2
+        sample_with_scalar.scalars.add("test_scalar_2", np.random.randn(5))
+        assert len(sample_with_scalar.scalars.get_names()) == 2
 
-        scalar = sample_with_scalar.del_scalar("test_scalar_1")
-        assert len(sample_with_scalar.get_scalar_names()) == 1
+        scalar = sample_with_scalar.scalars.remove("test_scalar_1")
+        assert len(sample_with_scalar.scalars.get_names()) == 1
         assert scalar is not None
         assert isinstance(scalar, float)
 
-        scalar = sample_with_scalar.del_scalar("test_scalar_2")
-        assert len(sample_with_scalar.get_scalar_names()) == 0
+        scalar = sample_with_scalar.scalars.remove("test_scalar_2")
+        assert len(sample_with_scalar.scalars.get_names()) == 0
         assert scalar is not None
         assert isinstance(scalar, np.ndarray)
@@ -1045,7 +1045,7 @@ def test_get_features_from_identifiers(
     def test_update_features_from_identifier(
         self, sample_with_tree_and_scalar_and_time_series
     ):
-        before = sample_with_tree_and_scalar_and_time_series.get_scalar("test_scalar_1")
+        before = sample_with_tree_and_scalar_and_time_series.scalars.get("test_scalar_1")
         sample_ = (
             sample_with_tree_and_scalar_and_time_series.update_features_from_identifier(
                 feature_identifiers={"type": "scalar", "name": "test_scalar_1"},
@@ -1053,7 +1053,7 @@ def test_update_features_from_identifier(
                 in_place=False,
             )
         )
-        after = sample_.get_scalar("test_scalar_1")
+        after = sample_.scalars.get("test_scalar_1")
         assert after != before
 
         before = sample_with_tree_and_scalar_and_time_series.get_time_series(
@@ -1161,14 +1161,14 @@ def test_from_features_identifier(
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "scalar", "name": "test_scalar_1"},
         )
-        assert sample_.get_scalar_names() == ["test_scalar_1"]
+        assert sample_.scalars.get_names() == ["test_scalar_1"]
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
 
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "time_series", "name": "test_time_series_1"},
         )
-        assert len(sample_.get_scalar_names()) == 0
+        assert len(sample_.scalars.get_names()) == 0
         assert sample_.get_time_series_names() == ["test_time_series_1"]
         assert len(sample_.get_field_names()) == 0
@@ -1182,7 +1182,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.get_scalar_names()) == 0
+        assert len(sample_.scalars.get_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
@@ -1194,7 +1194,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.get_scalar_names()) == 0
+        assert len(sample_.scalars.get_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
@@ -1204,7 +1204,7 @@ def test_from_features_identifier(
                 {"type": "nodes"},
             ],
         )
-        assert len(sample_.get_scalar_names()) == 0
+        assert len(sample_.scalars.get_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
diff --git a/tests/utils/test_init_with_tabular.py b/tests/utils/test_init_with_tabular.py
index 1a5bcfc7..2af9c734 100644
--- a/tests/utils/test_init_with_tabular.py
+++ b/tests/utils/test_init_with_tabular.py
@@ -52,7 +52,7 @@ def test_initialize_dataset_with_tabular_data(
         assert len(dataset) == nb_samples
 
         sample_1 = dataset[1]
-        scalar_value = sample_1.get_scalar("scalar_name_1")
+        scalar_value = sample_1.scalars.get("scalar_name_1")
         assert isinstance(scalar_value, float)
 
     def test_initialize_dataset_with_quantity_tabular_data(
@@ -62,5 +62,5 @@ def test_initialize_dataset_with_quantity_tabular_data(
         assert len(dataset) == nb_samples
 
         # scalar_names = ["test_scalar", "test_1D_field", "test_2D_field"]
-        # tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names)
+        # tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names)
         # assert isinstance(tabular_data_subset, dict)
diff --git a/tests/utils/test_stats.py b/tests/utils/test_stats.py
index f8d791ae..341142ca 100644
--- a/tests/utils/test_stats.py
+++ b/tests/utils/test_stats.py
@@ -59,7 +59,7 @@ def stats():
 @pytest.fixture()
 def sample_with_scalar(np_samples_3):
     s = Sample()
-    s.add_scalar("foo", float(np_samples_3.mean()))
+    s.scalars.add("foo", float(np_samples_3.mean()))
     return s
 
@@ -245,7 +245,7 @@ def test_get_stats(self, stats, samples):
         stats_dict = stats.get_stats()
 
         sample = samples[0]
-        feature_names = sample.get_scalar_names()
+        feature_names = sample.scalars.get_names()
         feature_names.extend(
             item
             for ts_name in sample.get_time_series_names()

From f0256ca5b87c89252a1fc4c3340e1c6490210b18 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:09:03 +0200
Subject: [PATCH 05/13] Not breaking current API [skip ci]

---
 .../2D_MultiScHypEl/construct_prediction.py   |  2 +-
 .../prepare_2D_MultiScHypEl.py                |  4 +-
 .../FNO/2D_MultiScHypEl/train_and_predict.py  |  8 +--
 .../FNO/Rotor37/construct_prediction.py       |  2 +-
 benchmarks/FNO/Rotor37/prepare_rotor37.py     |  4 +-
 benchmarks/FNO/Rotor37/train_and_predict.py   |  8 +--
 .../FNO/Tensile2d/construct_prediction.py     |  2 +-
 benchmarks/FNO/Tensile2d/prepare_tensile2d.py |  4 +-
 benchmarks/FNO/Tensile2d/train_and_predict.py |  8 +--
 .../FNO/VKI-LS59/construct_prediction.py      |  2 +-
 benchmarks/FNO/VKI-LS59/prepare_vki.py        |  2 +-
 benchmarks/FNO/VKI-LS59/train_and_predict.py  |  8 +--
 benchmarks/MGN/data.py                        |  4 +-
 benchmarks/MMGP/Rotor37/run_rotor37.py        | 10 +--
 .../MMGP/Tensile2d/construct_prediction.py    |  2 +-
 benchmarks/MMGP/VKI-LS59/data.py              |  4 +-
 .../Vi-Transf/main_elasto_plasto_dynamics.py  | 12 ++--
 benchmarks/Vi-Transf/main_stationary.py       | 12 ++--
 .../bridges/airfrans_sample_to_geometric.py   |  8 +--
 .../bridges/base_sample_to_geometric.py       |  4 +-
 .../bridges/multiscale_sample_to_geometric.py |  4 +-
 .../bridges/tensile_sample_to_geometric.py    |  4 +-
 .../loader/bridges/vki_sample_to_geometric.py |  4 +-
 .../convert_users_data_into_plaid.ipynb       |  4 +-
 docs/source/notebooks/dataset.ipynb           | 32 +++++-----
 docs/source/notebooks/huggingface.ipynb       |  4 +-
 docs/source/notebooks/init_with_tabular.ipynb |  2 +-
 docs/source/notebooks/pipeline.ipynb          |  4 +-
 docs/source/notebooks/sample.ipynb            | 14 ++--
 docs/source/notebooks/stats.ipynb             |  4 +-
 .../bridges/huggingface_bridge_example.py     |  4 +-
 examples/containers/bench_parallel_load.py    |  4 +-
 examples/containers/dataset_example.py        | 32 +++++-----
 examples/containers/sample_example.py         | 16 ++---
 examples/convert_users_data_example.py        |  4 +-
 examples/pipelines/pipeline.py                |  4 +-
 examples/utils/init_with_tabular_example.py   |  2 +-
 examples/utils/stats_example.py               |  6 +-
 src/plaid/containers/dataset.py               |  6 +-
 src/plaid/containers/sample.py                | 64 +++++++++++++++----
 src/plaid/post/bisect.py                      |  4 +-
 src/plaid/utils/init_with_tabular.py          |  2 +-
 src/plaid/utils/stats.py                      |  4 +-
 tests/bridges/test_huggingface_bridge.py      |  2 +-
 tests/conftest.py                             |  4 +-
 tests/containers/test_dataset.py              |  2 +-
 tests/containers/test_sample.py               | 50 +++++++--------
 tests/utils/test_init_with_tabular.py         |  2 +-
 tests/utils/test_stats.py                     |  4 +-
 49 files changed, 220 insertions(+), 182 deletions(-)
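The pattern this patch settles on, sketched below for clarity: the legacy scalar methods on Sample are kept as thin wrappers that delegate to the internal SampleScalars collection from the previous commits. This is a minimal, self-contained sketch, not the real plaid classes — Scalar is aliased to float here and only the scalar surface is shown:

    # Minimal sketch of the delegation pattern (assumption: simplified
    # stand-ins for plaid.types.Scalar and the real container classes).
    from typing import Optional

    Scalar = float


    class SampleScalars:
        """Internal collection that owns the scalar storage."""

        def __init__(self, scalars: Optional[dict[str, Scalar]] = None) -> None:
            self.data: dict[str, Scalar] = scalars if scalars is not None else {}

        def add(self, name: str, value: Scalar) -> None:
            self.data[name] = value

        def get(self, name: str) -> Optional[Scalar]:
            return self.data.get(name)

        def remove(self, name: str) -> Scalar:
            if name not in self.data:
                raise KeyError(f"There is no scalar value with name {name}.")
            return self.data.pop(name)


    class Sample:
        """Public container: the legacy API stays, storage moves to the collection."""

        def __init__(self) -> None:
            self._scalars = SampleScalars()

        def add_scalar(self, name: str, value: Scalar) -> None:
            self._scalars.add(name, value)

        def get_scalar(self, name: str) -> Optional[Scalar]:
            return self._scalars.get(name)

        def del_scalar(self, name: str) -> Scalar:
            return self._scalars.remove(name)


    sample = Sample()
    sample.add_scalar("rotation", 0.5)
    assert sample.get_scalar("rotation") == 0.5  # old call sites keep working

Because both surfaces behave identically, the benchmarks, notebooks, examples, and tests below can be switched back to the legacy spelling mechanically.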
diff --git a/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py b/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py
index a1528333..75065279 100644
--- a/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py
+++ b/benchmarks/FNO/2D_MultiScHypEl/construct_prediction.py
@@ -65,7 +65,7 @@
     for fn in out_fields_names:
         prediction[count][fn] = op.dot(sample_pred.get_field(fn))
     for sn in out_scalars_names:
-        prediction[count][sn] = sample_pred.scalars.get(sn)
+        prediction[count][sn] = sample_pred.get_scalar(sn)
 
     count += 1
diff --git a/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py b/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py
index dc11efc4..e8c349ba 100644
--- a/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py
+++ b/benchmarks/FNO/2D_MultiScHypEl/prepare_2D_MultiScHypEl.py
@@ -108,8 +108,8 @@ def compute_signed_distance(mesh,eval_points):
 
     for scalar_name in scalar_names:
-        old_scalar= sample.scalars.get( name=scalar_name)
-        new_sample.scalars.add(scalar_name, old_scalar)
+        old_scalar= sample.get_scalar( name=scalar_name)
+        new_sample.add_scalar(scalar_name, old_scalar)
 
     new_sample.add_field("Signed_Distance",compute_signed_distance(copy.deepcopy(input_mesh),rec_mesh.nodes))
     path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index))
diff --git a/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py b/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py
index d0bae25f..8990aaa0 100644
--- a/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py
+++ b/benchmarks/FNO/2D_MultiScHypEl/train_and_predict.py
@@ -46,14 +46,14 @@
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size))
 for i, id_sample in enumerate(ids_train):
     for k, fn in enumerate(out_fields_names):
         outputs[i, k, :, :] = dataset[id_sample].get_field(fn).reshape((size, size))
     for k, sn in enumerate(out_scalars_names):
-        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn)
+        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn)
 
 
 min_in = inputs.min(axis=(0, 2, 3), keepdims=True)
@@ -125,7 +125,7 @@ def __getitem__(self, idx):
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 inputs = (inputs - min_in) / (max_in - min_in)
@@ -154,7 +154,7 @@ def __getitem__(self, idx):
     for k, fn in enumerate(out_fields_names):
         dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten())
     for k, sn in enumerate(out_scalars_names):
-        dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
+        dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
 
 
 if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir):
diff --git a/benchmarks/FNO/Rotor37/construct_prediction.py b/benchmarks/FNO/Rotor37/construct_prediction.py
index a7b3daa7..dbe90b7e 100644
--- a/benchmarks/FNO/Rotor37/construct_prediction.py
+++ b/benchmarks/FNO/Rotor37/construct_prediction.py
@@ -65,7 +65,7 @@
     for fn in out_fields_names:
         prediction[count][fn] = op.dot(sample_pred.get_field(fn))
     for sn in out_scalars_names:
-        prediction[count][sn] = sample_pred.scalars.get(sn)
+        prediction[count][sn] = sample_pred.get_scalar(sn)
 
     count += 1
diff --git a/benchmarks/FNO/Rotor37/prepare_rotor37.py b/benchmarks/FNO/Rotor37/prepare_rotor37.py
index bc7c0d7d..96973cf2 100644
--- a/benchmarks/FNO/Rotor37/prepare_rotor37.py
+++ b/benchmarks/FNO/Rotor37/prepare_rotor37.py
@@ -112,8 +112,8 @@ def compute_signed_distance(mesh,eval_points):
 
     for scalar_name in scalar_names:
-        old_scalar= sample.scalars.get( name=scalar_name)
-        new_sample.scalars.add(scalar_name, old_scalar)
+        old_scalar= sample.get_scalar( name=scalar_name)
+        new_sample.add_scalar(scalar_name, old_scalar)
 
     path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index))
diff --git a/benchmarks/FNO/Rotor37/train_and_predict.py b/benchmarks/FNO/Rotor37/train_and_predict.py
index d383ed15..92f88d6f 100644
--- a/benchmarks/FNO/Rotor37/train_and_predict.py
+++ b/benchmarks/FNO/Rotor37/train_and_predict.py
@@ -47,14 +47,14 @@
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :, :] = dataset[id_sample].get_scalar(sn)
 
 outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size, size))
 for i, id_sample in enumerate(ids_train):
     for k, fn in enumerate(out_fields_names):
         outputs[i, k, :, :, :] = dataset[id_sample].get_field(fn).reshape((size, size, size))
     for k, sn in enumerate(out_scalars_names):
-        outputs[i, k+len(out_fields_names), :, :, :] = dataset[id_sample].scalars.get(sn)
+        outputs[i, k+len(out_fields_names), :, :, :] = dataset[id_sample].get_scalar(sn)
 
 
 min_in = inputs.min(axis=(0, 2, 3, 4), keepdims=True)
@@ -126,7 +126,7 @@ def __getitem__(self, idx):
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :, :] = dataset[id_sample].get_scalar(sn)
 
 inputs = (inputs - min_in) / (max_in - min_in+ 1e-8)
@@ -155,7 +155,7 @@ def __getitem__(self, idx):
     for k, fn in enumerate(out_fields_names):
         dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :, :].flatten())
     for k, sn in enumerate(out_scalars_names):
-        dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :, :].flatten()))
+        dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :, :].flatten()))
 
 
 if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir):
diff --git a/benchmarks/FNO/Tensile2d/construct_prediction.py b/benchmarks/FNO/Tensile2d/construct_prediction.py
index 27760e12..d0c7c855 100644
--- a/benchmarks/FNO/Tensile2d/construct_prediction.py
+++ b/benchmarks/FNO/Tensile2d/construct_prediction.py
@@ -66,7 +66,7 @@
     for fn in out_fields_names:
         prediction[count][fn] = op.dot(sample_pred.get_field(fn))
     for sn in out_scalars_names:
-        prediction[count][sn] = sample_pred.scalars.get(sn)
+        prediction[count][sn] = sample_pred.get_scalar(sn)
 
     count += 1
diff --git a/benchmarks/FNO/Tensile2d/prepare_tensile2d.py b/benchmarks/FNO/Tensile2d/prepare_tensile2d.py
index 55a1bed9..8691482f 100644
--- a/benchmarks/FNO/Tensile2d/prepare_tensile2d.py
+++ b/benchmarks/FNO/Tensile2d/prepare_tensile2d.py
@@ -103,8 +103,8 @@ def compute_signed_distance(mesh,eval_points):
 
     for scalar_name in scalar_names:
-        old_scalar= sample.scalars.get( name=scalar_name)
-        new_sample.scalars.add(scalar_name, old_scalar)
+        old_scalar= sample.get_scalar( name=scalar_name)
+        new_sample.add_scalar(scalar_name, old_scalar)
 
     new_sample.add_field("Signed_Distance",compute_signed_distance(copy.deepcopy(input_mesh),rec_mesh.nodes))
     path = os.path.join(prepared_data_dir,"dataset/samples/sample_{:09d}".format(sample_index))
diff --git a/benchmarks/FNO/Tensile2d/train_and_predict.py b/benchmarks/FNO/Tensile2d/train_and_predict.py
index f4903fd0..faeac67b 100644
--- a/benchmarks/FNO/Tensile2d/train_and_predict.py
+++ b/benchmarks/FNO/Tensile2d/train_and_predict.py
@@ -47,14 +47,14 @@
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size, size))
 for i, id_sample in enumerate(ids_train):
     for k, fn in enumerate(out_fields_names):
         outputs[i, k, :, :] = dataset[id_sample].get_field(fn).reshape((size, size))
     for k, sn in enumerate(out_scalars_names):
-        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn)
+        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn)
 
 
 min_in = inputs.min(axis=(0, 2, 3), keepdims=True)
@@ -124,7 +124,7 @@ def __getitem__(self, idx):
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance").reshape((size, size))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 inputs = (inputs - min_in) / (max_in - min_in)
@@ -152,7 +152,7 @@ def __getitem__(self, idx):
     for k, fn in enumerate(out_fields_names):
         dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten())
     for k, sn in enumerate(out_scalars_names):
-        dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
+        dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
 
 
 if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir):
diff --git a/benchmarks/FNO/VKI-LS59/construct_prediction.py b/benchmarks/FNO/VKI-LS59/construct_prediction.py
index d5fb7525..cb3c62bf 100644
--- a/benchmarks/FNO/VKI-LS59/construct_prediction.py
+++ b/benchmarks/FNO/VKI-LS59/construct_prediction.py
@@ -62,7 +62,7 @@
     for fn in out_fields_names:
         prediction[count][fn] = sample_pred.get_field(fn, base_name="Base_2_2")
     for sn in out_scalars_names:
-        prediction[count][sn] = sample_pred.scalars.get(sn)
+        prediction[count][sn] = sample_pred.get_scalar(sn)
 
     count += 1
diff --git a/benchmarks/FNO/VKI-LS59/prepare_vki.py b/benchmarks/FNO/VKI-LS59/prepare_vki.py
index 3154e82c..da80b796 100644
--- a/benchmarks/FNO/VKI-LS59/prepare_vki.py
+++ b/benchmarks/FNO/VKI-LS59/prepare_vki.py
@@ -61,7 +61,7 @@
     raise("unkown sample_index")
 
 for sn in scalar_names:
-    new_sample.scalars.add(sn, sample.scalars.get(sn))
+    new_sample.add_scalar(sn, sample.get_scalar(sn))
 
 new_sample.add_field("Signed_Distance", sample.get_field("sdf", base_name="Base_2_2"))
diff --git a/benchmarks/FNO/VKI-LS59/train_and_predict.py b/benchmarks/FNO/VKI-LS59/train_and_predict.py
index b4417265..26f9e386 100644
--- a/benchmarks/FNO/VKI-LS59/train_and_predict.py
+++ b/benchmarks/FNO/VKI-LS59/train_and_predict.py
@@ -49,14 +49,14 @@
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance", base_name="Base_2_2").reshape((size1, size2))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 outputs = np.empty((n_train, len(out_scalars_names)+len(out_fields_names), size1, size2))
 for i, id_sample in enumerate(ids_train):
     for k, fn in enumerate(out_fields_names):
         outputs[i, k, :, :] = dataset[id_sample].get_field(fn, base_name="Base_2_2").reshape((size1, size2))
     for k, sn in enumerate(out_scalars_names):
-        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].scalars.get(sn)
+        outputs[i, k+len(out_fields_names), :, :] = dataset[id_sample].get_scalar(sn)
 
 
 min_in = inputs.min(axis=(0, 2, 3), keepdims=True)
@@ -128,7 +128,7 @@ def __getitem__(self, idx):
     for in_chan in range(len(in_scalars_names)+1):
         inputs[i, in_chan, :, :] = dataset[id_sample].get_field("Signed_Distance", base_name="Base_2_2").reshape((size1, size2))
     for k, sn in enumerate(in_scalars_names):
-        inputs[i, k+1, :, :] = dataset[id_sample].scalars.get(sn)
+        inputs[i, k+1, :, :] = dataset[id_sample].get_scalar(sn)
 
 inputs = (inputs - min_in) / (max_in - min_in)
@@ -145,7 +145,7 @@ def __getitem__(self, idx):
     for k, fn in enumerate(out_fields_names):
         dataset[id_sample].add_field(fn, outputs_pred[i, k, :, :].flatten())
     for k, sn in enumerate(out_scalars_names):
-        dataset[id_sample].scalars.add(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
+        dataset[id_sample].add_scalar(sn, np.mean(outputs_pred[i, k+len(out_fields_names), :, :].flatten()))
 
 
 if os.path.exists(predicted_data_dir) and os.path.isdir(predicted_data_dir):
diff --git a/benchmarks/MGN/data.py b/benchmarks/MGN/data.py
index a82f91bb..6837cb14 100644
--- a/benchmarks/MGN/data.py
+++ b/benchmarks/MGN/data.py
@@ -231,7 +231,7 @@ def process_samples(dataset_name, dataset_path, indices, field_names, process_ty
         # Retrieve input scalars
         in_scalars_names = scalar_input_dict.get(dataset_name, [])
         X_scalars.append(
-            [mesh_data.scalars.get(fn) for fn in in_scalars_names]
+            [mesh_data.get_scalar(fn) for fn in in_scalars_names]
             if in_scalars_names
             else []
         )
@@ -239,7 +239,7 @@ def process_samples(dataset_name, dataset_path, indices, field_names, process_ty
         # Retrieve output scalars
         out_scalars_names = scalar_output_dict.get(dataset_name, [])
         Y_scalars.append(
-            [mesh_data.scalars.get(fn) for fn in out_scalars_names]
+            [mesh_data.get_scalar(fn) for fn in out_scalars_names]
            if out_scalars_names
             else []
         )
diff --git a/benchmarks/MMGP/Rotor37/run_rotor37.py b/benchmarks/MMGP/Rotor37/run_rotor37.py
index 79bf91ea..6982abea 100644
--- a/benchmarks/MMGP/Rotor37/run_rotor37.py
+++ b/benchmarks/MMGP/Rotor37/run_rotor37.py
@@ -58,16 +58,16 @@ def convert_data(
         sample = Sample.model_validate(pickle.loads(dataset[id]["sample"]))
         nodes.append(sample.get_nodes())
 
-        omega = sample.scalars.get("Omega")
-        pressure = sample.scalars.get("P")
+        omega = sample.get_scalar("Omega")
+        pressure = sample.get_scalar("P")
 
         density = sample.get_field("Density")
         pressure_field = sample.get_field("Pressure")
         temperature = sample.get_field("Temperature")
 
-        massflow = sample.scalars.get("Massflow")
-        compression_ratio = sample.scalars.get("Compression_ratio")
-        efficiency = sample.scalars.get("Efficiency")
+        massflow = sample.get_scalar("Massflow")
+        compression_ratio = sample.get_scalar("Compression_ratio")
+        efficiency = sample.get_scalar("Efficiency")
 
         X_scalars.append(np.array([omega, pressure]))
         Y_scalars.append(np.array([massflow, compression_ratio, efficiency]))
diff --git a/benchmarks/MMGP/Tensile2d/construct_prediction.py b/benchmarks/MMGP/Tensile2d/construct_prediction.py
index d48f37f2..a218bdce 100644
--- a/benchmarks/MMGP/Tensile2d/construct_prediction.py
+++ b/benchmarks/MMGP/Tensile2d/construct_prediction.py
@@ -43,7 +43,7 @@
     for fn in out_fields_names:
         prediction[count][fn] = sample_pred.get_field(fn+"_predicted")
     for sn in out_scalars_names:
-        prediction[count][sn] = sample_pred.scalars.get(sn+"_predicted")
+        prediction[count][sn] = sample_pred.get_scalar(sn+"_predicted")
 
     count += 1
diff --git a/benchmarks/MMGP/VKI-LS59/data.py b/benchmarks/MMGP/VKI-LS59/data.py
index 83fe147a..4d2eb25b 100644
--- a/benchmarks/MMGP/VKI-LS59/data.py
+++ b/benchmarks/MMGP/VKI-LS59/data.py
@@ -56,7 +56,7 @@ def extract_split_data(
     # Input scalar values
     for key in input_scalars:
-        inputs[key] = [plaid_dataset[i].scalars.get(key) for i in ids]
+        inputs[key] = [plaid_dataset[i].get_scalar(key) for i in ids]
 
     # --- OUTPUTS ---
     # Selected mesh field data
@@ -67,7 +67,7 @@ def extract_split_data(
     # Selected output scalar values
     for key in SCALAR_OUTPUTS:
-        outputs[key] = [plaid_dataset[i].scalars.get(key) for i in ids]
+        outputs[key] = [plaid_dataset[i].get_scalar(key) for i in ids]
 
     return inputs, outputs
diff --git a/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py b/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py
index f009840e..912cea7b 100644
--- a/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py
+++ b/benchmarks/Vi-Transf/main_elasto_plasto_dynamics.py
@@ -213,14 +213,14 @@ def main(cfg):
             optimizer.step()
 
         for n, fn in enumerate(output_fields_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"train/loss/{fn}", epoch_train_field_losses[n].item(), epoch
             )
         for n, sn in enumerate(output_scalars_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"train/loss/{sn}", epoch_train_scalar_losses[n].item(), epoch
             )
-        tb_logger.scalars.add("train/loss", epoch_train_loss, epoch)
+        tb_logger.add_scalar("train/loss", epoch_train_loss, epoch)
 
         # validation loop
         epoch_val_loss = 0
@@ -245,14 +245,14 @@ def main(cfg):
                 epoch_val_loss += loss.item() * (local_batch_size / len(val_dataset))
 
         for n, fn in enumerate(output_fields_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"val/loss/{fn}", epoch_val_field_losses[n].item(), epoch
             )
         for n, sn in enumerate(output_scalars_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"val/loss/{sn}", epoch_val_scalar_losses[n].item(), epoch
             )
-        tb_logger.scalars.add("val/loss", epoch_val_loss, epoch)
+        tb_logger.add_scalar("val/loss", epoch_val_loss, epoch)
         logger.info(
             f"Epoch {epoch:>{len(str(epochs))}}: Train Loss: {epoch_train_loss:.5f} | Val Loss: {epoch_val_loss:.5f}"
         )
diff --git a/benchmarks/Vi-Transf/main_stationary.py b/benchmarks/Vi-Transf/main_stationary.py
index 27ea1dec..0c39bc6c 100644
--- a/benchmarks/Vi-Transf/main_stationary.py
+++ b/benchmarks/Vi-Transf/main_stationary.py
@@ -176,14 +176,14 @@ def main(cfg):
             optimizer.step()
 
         for n, fn in enumerate(output_fields_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"train/loss/{fn}", epoch_train_field_losses[n].item(), epoch
             )
         for n, sn in enumerate(output_scalars_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"train/loss/{sn}", epoch_train_scalar_losses[n].item(), epoch
             )
-        tb_logger.scalars.add("train/loss", epoch_train_loss, epoch)
+        tb_logger.add_scalar("train/loss", epoch_train_loss, epoch)
 
         # validation loop
         epoch_val_loss = 0
@@ -208,14 +208,14 @@ def main(cfg):
                 epoch_val_loss += loss.item() * (local_batch_size / len(val_dataset))
 
         for n, fn in enumerate(output_fields_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"val/loss/{fn}", epoch_val_field_losses[n].item(), epoch
             )
         for n, sn in enumerate(output_scalars_names):
-            tb_logger.scalars.add(
+            tb_logger.add_scalar(
                 f"val/loss/{sn}", epoch_val_scalar_losses[n].item(), epoch
             )
-        tb_logger.scalars.add("val/loss", epoch_val_loss, epoch)
+        tb_logger.add_scalar("val/loss", epoch_val_loss, epoch)
         logger.info(
             f"Epoch {epoch:>{len(str(epochs))}}: Train Loss: {epoch_train_loss:.5f} | Val Loss: {epoch_val_loss:.5f}"
         )
diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py
index 917177c6..71e4e2d0 100644
--- a/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py
+++ b/benchmarks/Vi-Transf/src/data/loader/bridges/airfrans_sample_to_geometric.py
@@ -30,11 +30,11 @@ def airfrans_sample_to_geometric(
     edge_weight = np.linalg.norm(v2 - v1, axis=1)
 
     # loading scalars
-    aoa = sample.scalars.get("angle_of_attack")
-    inlet_velocity = sample.scalars.get("inlet_velocity")
+    aoa = sample.get_scalar("angle_of_attack")
+    inlet_velocity = sample.get_scalar("inlet_velocity")
     u_inlet = [np.cos(aoa) * inlet_velocity, np.sin(aoa) * inlet_velocity]
-    cl = sample.scalars.get("C_L")
-    cd = sample.scalars.get("C_D")
+    cl = sample.get_scalar("C_L")
+    cd = sample.get_scalar("C_D")
     output_scalars = np.array([cl, cd])
 
     # loading fields
diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py
index 0e325f81..8521b0cd 100644
--- a/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py
+++ b/benchmarks/Vi-Transf/src/data/loader/bridges/base_sample_to_geometric.py
@@ -46,9 +46,9 @@ def base_sample_to_geometric(
     input_scalars = []
     output_scalars = []
     for name in input_scalars_names:
-        input_scalars.append(sample.scalars.get(name))
+        input_scalars.append(sample.get_scalar(name))
     for name in output_scalars_names:
-        output_scalars.append(sample.scalars.get(name))
+        output_scalars.append(sample.get_scalar(name))
 
     # loading fields
     input_fields_names = problem_definition.get_input_fields_names()
diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py
index d8059a24..d9d6f561 100644
--- a/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py
+++ b/benchmarks/Vi-Transf/src/data/loader/bridges/multiscale_sample_to_geometric.py
@@ -52,9 +52,9 @@ def multiscale_sample_to_geometric(
     input_scalars = []
     output_scalars = []
     for name in input_scalars_names:
-        input_scalars.append(sample.scalars.get(name))
+        input_scalars.append(sample.get_scalar(name))
     for name in output_scalars_names:
-        output_scalars.append(sample.scalars.get(name))
+        output_scalars.append(sample.get_scalar(name))
 
     input_fields = vertices
     input_fields_names = ["x", "y"]
diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py
index b5956650..c16850f9 100644
--- a/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py
+++ b/benchmarks/Vi-Transf/src/data/loader/bridges/tensile_sample_to_geometric.py
@@ -61,9 +61,9 @@ def tensile_sample_to_geometric(
     input_scalars = []
     output_scalars = []
     for name in input_scalars_names:
-        input_scalars.append(sample.scalars.get(name))
+        input_scalars.append(sample.get_scalar(name))
     for name in output_scalars_names:
-        output_scalars.append(sample.scalars.get(name))
+        output_scalars.append(sample.get_scalar(name))
 
     # sdf and one hot encoding
     border_ids = get_border_ids(vertices, faces)
diff --git a/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py b/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py
index 649e6957..d8e18b2f 100644
--- a/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py
+++ b/benchmarks/Vi-Transf/src/data/loader/bridges/vki_sample_to_geometric.py
@@ -37,9 +37,9 @@ def vki_sample_to_geometric(
     input_scalars = []
     output_scalars = []
     for name in input_scalars_names:
-        input_scalars.append(sample.scalars.get(name))
+        input_scalars.append(sample.get_scalar(name))
     for name in output_scalars_names:
-        output_scalars.append(sample.scalars.get(name))
+        output_scalars.append(sample.get_scalar(name))
 
     if len(input_fields_names) >= 1:
         input_fields = []
diff --git a/docs/source/notebooks/convert_users_data_into_plaid.ipynb b/docs/source/notebooks/convert_users_data_into_plaid.ipynb
index a2e8bac7..038871dc 100644
--- a/docs/source/notebooks/convert_users_data_into_plaid.ipynb
+++ b/docs/source/notebooks/convert_users_data_into_plaid.ipynb
@@ -196,10 +196,10 @@
    "\n",
    "    # Add random scalar values to the sample\n",
    "    for sname in in_scalars_names:\n",
-   "        sample.scalars.add(sname, np.random.randn())\n",
+   "        sample.add_scalar(sname, np.random.randn())\n",
    "\n",
    "    for sname in out_scalars_names:\n",
-   "        sample.scalars.add(sname, np.random.randn())\n",
+   "        sample.add_scalar(sname, np.random.randn())\n",
    "\n",
    "    # Add random field values to the sample\n",
    "    for j, sname in enumerate(out_fields_names):\n",
diff --git a/docs/source/notebooks/dataset.ipynb b/docs/source/notebooks/dataset.ipynb
index 91e11056..6593f6c5 100644
--- a/docs/source/notebooks/dataset.ipynb
+++ b/docs/source/notebooks/dataset.ipynb
@@ -151,7 +151,7 @@
    "outputs": [],
    "source": [
    "# Add a scalar to the Sample\n",
-   "sample_01.scalars.add(\"rotation\", np.random.randn())\n",
+   "sample_01.add_scalar(\"rotation\", np.random.randn())\n",
    "print(f\"{sample_01 = }\")"
    ]
   },
@@ -181,7 +181,7 @@
    "outputs": [],
    "source": [
    "# Add a scalar to the second Sample\n",
-   "sample_02.scalars.add(\"rotation\", np.random.randn())\n",
+   "sample_02.add_scalar(\"rotation\", np.random.randn())\n",
    "print(f\"{sample_02 = }\")"
    ]
   },
@@ -201,8 +201,8 @@
    "# Initialize a third empty Sample\n",
    "print(\"#---# Empty Sample\")\n",
    "sample_03 = Sample()\n",
-   "sample_03.scalars.add(\"speed\", np.random.randn())\n",
-   "sample_03.scalars.add(\"rotation\", sample_01.scalars.get(\"rotation\"))\n",
+   "sample_03.add_scalar(\"speed\", np.random.randn())\n",
+   "sample_03.add_scalar(\"rotation\", sample_01.get_scalar(\"rotation\"))\n",
    "sample_03.add_tree(cgns_mesh)\n",
    "\n",
    "# Show Sample CGNS content\n",
@@ -237,9 +237,9 @@
    "print(f\"{sample_03 = }\", end=\"\\n\\n\")\n",
    "\n",
    "# Print sample scalar data\n",
-   "print(f\"{sample_03.scalars.get_names() = }\")\n",
-   "print(f\"{sample_03.scalars.get('speed') = }\")\n",
-   "print(f\"{sample_03.scalars.get('rotation') = }\", end=\"\\n\\n\")\n",
+   "print(f\"{sample_03.get_scalar_names( ) = }\")\n",
+   "print(f\"{sample_03.get_scalar('speed') = }\")\n",
+   "print(f\"{sample_03.get_scalar('rotation') = }\", end=\"\\n\\n\")\n",
    "\n",
    "# Print sample scalar data\n",
    "print(f\"{sample_03.get_field_names() = }\")\n",
@@ -422,9 +422,9 @@
    "print(f\"{dataset[1] = }\")  # getitem strategy\n",
    "print(f\"{dataset[2] = }\", end=\"\\n\\n\")\n",
    "\n",
-   "print(\"scalar of the first sample  = \", dataset[0].scalars.get_names())\n",
-   "print(\"scalar of the second sample = \", dataset[1].scalars.get_names())\n",
-   "print(\"scalar of the third sample  = \", dataset[2].scalars.get_names())"
+   "print(\"scalar of the first sample  = \", dataset[0].get_scalar_names( ))\n",
+   "print(\"scalar of the second sample = \", dataset[1].get_scalar_names( ))\n",
+   "print(\"scalar of the third sample  = \", dataset[2].get_scalar_names( ))"
    ]
   },
@@ -434,9 +434,9 @@
    "outputs": [],
    "source": [
    "# Access dataset information\n",
-   "print(f\"{dataset[0].scalars.get('rotation') = }\")\n",
-   "print(f\"{dataset[1].scalars.get('rotation') = }\")\n",
-   "print(f\"{dataset[2].scalars.get('rotation') = }\")"
+   "print(f\"{dataset[0].get_scalar('rotation') = }\")\n",
+   "print(f\"{dataset[1].get_scalar('rotation') = }\")\n",
+   "print(f\"{dataset[2].get_scalar('rotation') = }\")"
    ]
   },
@@ -453,7 +453,7 @@
    "outputs": [],
    "source": [
    "# Print scalars in tabular format\n",
-   "print(f\"{dataset.scalars.get_names() = }\", end=\"\\n\\n\")\n",
+   "print(f\"{dataset.get_scalar_names( ) = }\", end=\"\\n\\n\")\n",
    "\n",
    "dprint(\"get rotation scalar = \", dataset.scalars.gets_to_tabular([\"rotation\"]))\n",
    "dprint(\"get speed scalar = \", dataset.scalars.gets_to_tabular([\"speed\"]), end=\"\\n\\n\")\n",
@@ -518,8 +518,8 @@
    "samples = []\n",
    "for _ in range(nb_samples):\n",
    "    sample = Sample()\n",
-   "    sample.scalars.add(\"rotation\", np.random.rand() + 1.0)\n",
-   "    sample.scalars.add(\"random_name\", np.random.rand() - 1.0)\n",
+   "    sample.add_scalar(\"rotation\", np.random.rand() + 1.0)\n",
+   "    sample.add_scalar(\"random_name\", np.random.rand() - 1.0)\n",
    "    samples.append(sample)\n",
    "\n",
    "# Add a list of Samples\n",
diff --git a/docs/source/notebooks/huggingface.ipynb b/docs/source/notebooks/huggingface.ipynb
index ec1b21ed..8b64dc63 100644
--- a/docs/source/notebooks/huggingface.ipynb
+++ b/docs/source/notebooks/huggingface.ipynb
@@ -47,7 +47,7 @@
    "def show_sample(sample: Sample):\n",
    "    print(f\"sample = {sample}\")\n",
    "    sample.show_tree()\n",
-   "    print(f\"{sample.scalars.get_names() = }\")\n",
+   "    print(f\"{sample.get_scalar_names( ) = }\")\n",
    "    print(f\"{sample.get_field_names() = }\")"
    ]
   },
@@ -93,7 +93,7 @@
    "    sample = Sample()\n",
    "\n",
    "    sample.add_tree(MeshToCGNS(mesh))\n",
-   "    sample.scalars.add(\"scalar\", np.random.randn())\n",
+   "    sample.add_scalar(\"scalar\", np.random.randn())\n",
    "    sample.add_field(\"node_field\", np.random.rand(1, len(points)), location=\"Vertex\")\n",
    "    sample.add_field(\n",
    "        \"cell_field\", np.random.rand(1, len(points)), location=\"CellCenter\"\n",
diff --git a/docs/source/notebooks/init_with_tabular.ipynb b/docs/source/notebooks/init_with_tabular.ipynb
index 92e0134d..7da205d9 100644
--- a/docs/source/notebooks/init_with_tabular.ipynb
+++ b/docs/source/notebooks/init_with_tabular.ipynb
@@ -114,7 +114,7 @@
    "outputs": [],
    "source": [
    "# Access and display the value of a particular scalar within a sample\n",
-   "scalar_value = sample_1.scalars.get(\"scalar_0\")\n",
+   "scalar_value = sample_1.get_scalar(\"scalar_0\")\n",
    "print(\"Scalar 'scalar_0' in Sample 1:\", scalar_value)"
    ]
   },
diff --git a/docs/source/notebooks/pipeline.ipynb b/docs/source/notebooks/pipeline.ipynb
index 29b6bd8e..540496d5 100644
--- a/docs/source/notebooks/pipeline.ipynb
+++ b/docs/source/notebooks/pipeline.ipynb
@@ -164,7 +164,7 @@
    "source": [
    "dataset_train = dataset_train.from_features_identifier(all_feature_id)\n",
    "print(\"dataset_train:\", dataset_train)\n",
-   "print(\"scalar names =\", dataset_train.scalars.get_names())\n",
+   "print(\"scalar names =\", dataset_train.get_scalar_names( ))\n",
    "print(\"field names =\", dataset_train.get_field_names())"
    ]
   },
@@ -216,7 +216,7 @@
    "source": [
    "preprocessed_dataset = preprocessor.fit_transform(dataset_train)\n",
    "print(\"preprocessed_dataset:\", preprocessed_dataset)\n",
-   "print(\"scalar names =\", preprocessed_dataset.scalars.get_names())\n",
+   "print(\"scalar names =\", preprocessed_dataset.get_scalar_names( ))\n",
    "print(\"field names =\", preprocessed_dataset.get_field_names())"
    ]
   },
diff --git a/docs/source/notebooks/sample.ipynb b/docs/source/notebooks/sample.ipynb
index 1090693b..e9d8b5e6 100644
--- a/docs/source/notebooks/sample.ipynb
+++ b/docs/source/notebooks/sample.ipynb
@@ -54,7 +54,7 @@
    "def show_sample(sample: Sample):\n",
    "    print(f\"sample = {sample}\")\n",
    "    sample.show_tree()\n",
-   "    print(f\"{sample.scalars.get_names() = }\")\n",
+   "    print(f\"{sample.get_scalar_names( ) = }\")\n",
    "    print(f\"{sample.get_field_names() = }\")"
    ]
   },
@@ -156,7 +156,7 @@
    "outputs": [],
    "source": [
    "# Add a rotation scalar to this Sample\n",
-   "sample.scalars.add(\"rotation\", np.random.randn())\n",
+   "sample.add_scalar(\"rotation\", np.random.randn())\n",
    "\n",
    "show_sample(sample)"
    ]
@@ -168,8 +168,8 @@
    "outputs": [],
    "source": [
    "# Add a more scalars to this Sample\n",
-   "sample.scalars.add(\"speed\", np.random.randn())\n",
-   "sample.scalars.add(\"other\", np.random.randn())\n",
+   "sample.add_scalar(\"speed\", np.random.randn())\n",
+   "sample.add_scalar(\"other\", np.random.randn())\n",
    "\n",
    "show_sample(sample)"
    ]
@@ -402,9 +402,9 @@
    "outputs": [],
    "source": [
    "# It will look for a default base if no base and zone are given\n",
-   "print(f\"{sample.scalars.get_names() = }\")\n",
-   "print(f\"{sample.scalars.get('omega') = }\")\n",
-   "print(f\"{sample.scalars.get('rotation') = }\")"
+   "print(f\"{sample.get_scalar_names( ) = }\")\n",
+   "print(f\"{sample.get_scalar('omega') = }\")\n",
+   "print(f\"{sample.get_scalar('rotation') = }\")"
    ]
   },
diff --git a/docs/source/notebooks/stats.ipynb b/docs/source/notebooks/stats.ipynb
index 50e992ea..a805602b 100644
--- a/docs/source/notebooks/stats.ipynb
+++ b/docs/source/notebooks/stats.ipynb
@@ -198,7 +198,7 @@
    "spatial_shape_max = 20\n",
    "#\n",
    "for sample in samples:\n",
-   "    sample.scalars.add(\"test_scalar\", np.random.randn())\n",
+   "    sample.add_scalar(\"test_scalar\", np.random.randn())\n",
    "    sample.init_base(2, 3, \"test_base\")\n",
    "    zone_shape = np.array([0, 0, 0])\n",
    "    sample.init_zone(zone_shape, zone_name=\"test_zone\")\n",
@@ -242,7 +242,7 @@
    "samples = [Sample() for _ in range(nb_samples)]\n",
    "\n",
    "for sample in samples:\n",
-   "    sample.scalars.add(\"test_scalar\", np.random.randn())\n",
+   "    sample.add_scalar(\"test_scalar\", np.random.randn())\n",
    "    sample.init_base(2, 3, \"test_base\")\n",
    "    zone_shape = np.array([0, 0, 0])\n",
    "    sample.init_zone(zone_shape, zone_name=\"test_zone\")\n",
diff --git a/examples/bridges/huggingface_bridge_example.py b/examples/bridges/huggingface_bridge_example.py
index 3c9f4380..05228513 100644
--- a/examples/bridges/huggingface_bridge_example.py
+++ b/examples/bridges/huggingface_bridge_example.py
@@ -91,10 +91,10 @@
 
     # Add random scalar values to the sample
     for sname in in_scalars_names:
-        sample.scalars.add(sname, np.random.randn())
+        sample.add_scalar(sname, np.random.randn())
 
     for sname in out_scalars_names:
-        sample.scalars.add(sname, np.random.randn())
+        sample.add_scalar(sname, np.random.randn())
 
     # Add random field values to the sample
     for j, sname in enumerate(out_fields_names):
diff --git a/examples/containers/bench_parallel_load.py b/examples/containers/bench_parallel_load.py
index 77c4f9c2..d640d774 100644
--- a/examples/containers/bench_parallel_load.py
+++ b/examples/containers/bench_parallel_load.py
@@ -88,8 +88,8 @@
         smp = tmpsmp
 
     # ---# Add some random data
-    smp.scalars.add("id", i)
-    smp.scalars.add("s0", np.random.randn())
+    smp.add_scalar("id", i)
+    smp.add_scalar("s0", np.random.randn())
     smp.add_field("f0", np.random.randn(100))
 
     dset.add_sample(smp)
diff --git a/examples/containers/dataset_example.py b/examples/containers/dataset_example.py
index 625e9619..ddab819b 100644
--- a/examples/containers/dataset_example.py
+++ b/examples/containers/dataset_example.py
@@ -93,7 +93,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 
 # %%
 # Add a scalar to the Sample
-sample_01.scalars.add("rotation", np.random.randn())
+sample_01.add_scalar("rotation", np.random.randn())
 print(f"{sample_01 = }")
 
 # %% [markdown]
@@ -107,7 +107,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 
 # %%
 # Add a scalar to the second Sample
-sample_02.scalars.add("rotation", np.random.randn())
+sample_02.add_scalar("rotation", np.random.randn())
 print(f"{sample_02 = }")
 
 # %% [markdown]
@@ -117,8 +117,8 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 # Initialize a third empty Sample
 print("#---# Empty Sample")
 sample_03 = Sample()
-sample_03.scalars.add("speed", np.random.randn())
-sample_03.scalars.add("rotation", sample_01.scalars.get("rotation"))
+sample_03.add_scalar("speed", np.random.randn())
+sample_03.add_scalar("rotation", sample_01.get_scalar("rotation"))
 sample_03.add_tree(cgns_mesh)
 
 # Show Sample CGNS content
@@ -137,9 +137,9 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 print(f"{sample_03 = }", end="\n\n")
 
 # Print sample scalar data
-print(f"{sample_03.scalars.get_names() = }")
-print(f"{sample_03.scalars.get('speed') = }")
-print(f"{sample_03.scalars.get('rotation') = }", end="\n\n")
+print(f"{sample_03.get_scalar_names( ) = }")
+print(f"{sample_03.get_scalar('speed') = }")
+print(f"{sample_03.get_scalar('rotation') = }", end="\n\n")
 
 # Print sample scalar data
 print(f"{sample_03.get_field_names() = }")
@@ -238,22 +238,22 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 print(f"{dataset[1] = }")  # getitem strategy
 print(f"{dataset[2] = }", end="\n\n")
 
-print("scalar of the first sample  = ", dataset[0].scalars.get_names())
-print("scalar of the second sample = ", dataset[1].scalars.get_names())
-print("scalar of the third sample  = ", dataset[2].scalars.get_names())
+print("scalar of the first sample  = ", dataset[0].get_scalar_names( ))
+print("scalar of the second sample = ", dataset[1].get_scalar_names( ))
+print("scalar of the third sample  = ", dataset[2].get_scalar_names( ))
 
 # %%
 # Access dataset information
-print(f"{dataset[0].scalars.get('rotation') = }")
-print(f"{dataset[1].scalars.get('rotation') = }")
-print(f"{dataset[2].scalars.get('rotation') = }")
+print(f"{dataset[0].get_scalar('rotation') = }")
+print(f"{dataset[1].get_scalar('rotation') = }")
+print(f"{dataset[2].get_scalar('rotation') = }")
 
 # %% [markdown]
 # ### Get Dataset scalars to tabular
 
 # %%
 # Print scalars in tabular format
-print(f"{dataset.scalars.get_names() = }", end="\n\n")
+print(f"{dataset.get_scalar_names( ) = }", end="\n\n")
 
 dprint("get rotation scalar = ", dataset.scalars.gets_to_tabular(["rotation"]))
 dprint("get speed scalar = ", dataset.scalars.gets_to_tabular(["speed"]), end="\n\n")
@@ -288,8 +288,8 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 samples = []
 for _ in range(nb_samples):
     sample = Sample()
-    sample.scalars.add("rotation", np.random.rand() + 1.0)
-    sample.scalars.add("random_name", np.random.rand() - 1.0)
+    sample.add_scalar("rotation", np.random.rand() + 1.0)
+    sample.add_scalar("random_name", np.random.rand() - 1.0)
     samples.append(sample)
 
 # Add a list of Samples
diff --git a/examples/containers/sample_example.py b/examples/containers/sample_example.py
index 77d111e1..f8b16820 100644
--- a/examples/containers/sample_example.py
+++ b/examples/containers/sample_example.py
@@ -32,7 +32,7 @@
 def show_sample(sample: Sample):
     print(f"{sample = }")
     sample.show_tree()
-    print(f"{sample.scalars.get_names() = }")
+    print(f"{sample.get_scalar_names( ) = }")
     print(f"{sample.get_field_names() = }")
 
@@ -92,14 +92,14 @@
 
 # %%
 # Add a rotation scalar to this Sample
-sample.scalars.add("rotation", np.random.randn())
+sample.add_scalar("rotation", np.random.randn())
 
 show_sample(sample)
 
 # %%
 # Add a more scalars to this Sample
-sample.scalars.add("speed", np.random.randn())
-sample.scalars.add("other", np.random.randn())
+sample.add_scalar("speed", np.random.randn())
+sample.add_scalar("other", np.random.randn())
 
 show_sample(sample)
 
@@ -223,9 +223,9 @@
 
 # %%
 # It will look for a default base if no base and zone are given
-print(f"{sample.scalars.get_names() = }")
-print(f"{sample.scalars.get('omega') = }")
-print(f"{sample.scalars.get('rotation') = }")
+print(f"{sample.get_scalar_names( ) = }")
+print(f"{sample.get_scalar('omega') = }")
+print(f"{sample.get_scalar('rotation') = }")
 
 # %% [markdown]
 # ### Access fields data in Sample
@@ -551,7 +551,7 @@ def show_sample(sample: Sample):
 
 show_sample(new_sample)
 
-new_sample.scalars.add("a", 2.1)
+new_sample.add_scalar("a", 2.1)
 
 serialized_sample = new_sample.model_dump()
 unserialized_sample = Sample.model_validate(serialized_sample)
diff --git a/examples/convert_users_data_example.py b/examples/convert_users_data_example.py
index a01b7d5e..70e54638 100644
--- a/examples/convert_users_data_example.py
+++ b/examples/convert_users_data_example.py
@@ -125,10 +125,10 @@
 
     # Add random scalar values to the sample
     for sname in in_scalars_names:
-        sample.scalars.add(sname, np.random.randn())
+        sample.add_scalar(sname, np.random.randn())
 
     for sname in out_scalars_names:
-        sample.scalars.add(sname, np.random.randn())
+        sample.add_scalar(sname, np.random.randn())
 
     # Add random field values to the sample
     for j, sname in enumerate(out_fields_names):
diff --git a/examples/pipelines/pipeline.py b/examples/pipelines/pipeline.py
index 31719411..509bfddc 100644
--- a/examples/pipelines/pipeline.py
+++ b/examples/pipelines/pipeline.py
@@ -112,7 +112,7 @@
 
 dataset_train = dataset_train.from_features_identifier(all_feature_id)
 print("dataset_train:", dataset_train)
-print("scalar names =", dataset_train.scalars.get_names())
+print("scalar names =", dataset_train.get_scalar_names( ))
 print("field names =", dataset_train.get_field_names())
 
@@ -143,7 +143,7 @@
 
 preprocessed_dataset = preprocessor.fit_transform(dataset_train)
 print("preprocessed_dataset:", preprocessed_dataset)
-print("scalar names =", preprocessed_dataset.scalars.get_names())
+print("scalar names =", preprocessed_dataset.get_scalar_names( ))
 print("field names =", preprocessed_dataset.get_field_names())
 
diff --git a/examples/utils/init_with_tabular_example.py b/examples/utils/init_with_tabular_example.py
index 070df75e..cc5986e5 100644
--- a/examples/utils/init_with_tabular_example.py
+++ b/examples/utils/init_with_tabular_example.py
@@ -62,7 +62,7 @@ def dprint(name: str, dictio: dict):
 
 # %%
 # Access and display the value of a particular scalar within a sample
-scalar_value = sample_1.scalars.get("scalar_0")
+scalar_value = sample_1.get_scalar("scalar_0")
 print("Scalar 'scalar_0' in Sample 1:", scalar_value)
 
 # %%
diff --git a/examples/utils/stats_example.py b/examples/utils/stats_example.py
index ed45b699..1b032e6d 100644
--- a/examples/utils/stats_example.py
+++ b/examples/utils/stats_example.py
@@ -116,8 +116,8 @@ def sprint(stats: dict):
 spatial_shape_max = 5
 #
 for sample in samples:
-    sample.scalars.add("test_scalar", np.random.randn())
-    sample.scalars.add("test_ND_scalar", np.random.randn(3))
+    sample.add_scalar("test_scalar", np.random.randn())
+    sample.add_scalar("test_ND_scalar", np.random.randn(3))
     sample.init_base(2, 3,)
     zone_shape = np.array([0, 0, 0])
     sample.init_zone(zone_shape)
@@ -149,7 +149,7 @@ def sprint(stats: dict):
 samples = [Sample() for _ in range(nb_samples)]
 
 for sample in samples:
-    sample.scalars.add("test_scalar", np.random.randn())
+    sample.add_scalar("test_scalar", np.random.randn())
     sample.init_base(2, 3,)
     zone_shape = np.array([0, 0, 0])
     sample.init_zone(zone_shape)
diff --git a/src/plaid/containers/dataset.py b/src/plaid/containers/dataset.py
index b1d3e7d6..41be75a3 100644
--- a/src/plaid/containers/dataset.py
+++ b/src/plaid/containers/dataset.py
@@ -365,7 +365,7 @@ def get_scalar_names(self, ids: Optional[list[int]] = None) -> list[str]:
         scalars_names = []
         for sample in self.get_samples(ids, as_list=True):
-            s_names = sample.scalars.get_names()
+            s_names = sample.get_scalar_names( )
             for s_name in s_names:
                 if s_name not in scalars_names:
                     scalars_names.append(s_name)
@@ -511,7 +511,7 @@ def add_tabular_scalars(
         for i_samp in range(nb_samples):
             sample = Sample()
             for name in names:
-                sample.scalars.add(name, name_to_ids[name][i_samp])
+                sample.add_scalar(name, name_to_ids[name][i_samp])
             self.add_sample(sample)
 
     def get_scalars_to_tabular(
@@ -547,7 +547,7 @@ def get_scalars_to_tabular(
             res = np.empty(nb_samples)
             res.fill(None)
             for i_, id in enumerate(sample_ids):
-                val = self[id].scalars.get(s_name)
+                val = self[id].get_scalar(s_name)
                 if val is not None:
                     res[i_] = val
             named_tabular[s_name] = res
diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py
index 3787e842..23506bf1 100644
--- a/src/plaid/containers/sample.py
+++ b/src/plaid/containers/sample.py
@@ -204,9 +204,47 @@ def __init__(
 
         self._extra_data = None
 
-    @property
-    def scalars(self) -> SampleScalars:
-        return self._scalars
+    def get_scalar(self, name: str) -> Scalar | None:
+        """Retrieve a scalar value associated with the given name.
+
+        Args:
+            name (str): The name of the scalar value to retrieve.
+
+        Returns:
+            Scalar or None: The scalar value associated with the given name, or None if the name is not found.
+        """
+        return self._scalars.get(name)
+
+    def add_scalar(self, name: str, value: Scalar) -> None:
+        """Add a scalar value to a dictionary.
+
+        Args:
+            name (str): The name of the scalar value.
+            value (Scalar): The scalar value to add or update in the dictionary.
+        """
+        self._scalars.add(name, value)
+
+    def del_scalar(self, name: str) -> Scalar:
+        """Delete a scalar value from the dictionary.
+
+        Args:
+            name (str): The name of the scalar value to be deleted.
+
+        Raises:
+            KeyError: Raised when no scalar with the provided name exists.
+
+        Returns:
+            Scalar: The value of the deleted scalar.
+        """
+        return self._scalars.remove(name)
+
+    def get_scalar_names(self) -> list[str]:
+        """Get the list of scalar names available in the object.
+
+        Returns:
+            list[str]: A list containing the names of the available scalars.
+        """
+        return self._scalars.get_names()
 
     def copy(self) -> Self:
         """Create a deep copy of the sample.
@@ -1700,7 +1738,7 @@ def get_all_features_identifiers(
             list[FeatureIdentifier]: A list of dictionaries containing the identifiers of all features in the sample.
         """
         all_features_identifiers = []
-        for sn in self.scalars.get_names():
+        for sn in self.get_scalar_names():
             all_features_identifiers.append({"type": "scalar", "name": sn})
         for tsn in self.get_time_series_names():
             all_features_identifiers.append({"type": "time_series", "name": tsn})
@@ -1791,7 +1829,7 @@ def get_feature_from_string_identifier(
         arg_names = AUTHORIZED_FEATURE_INFOS[feature_type]
 
         if feature_type == "scalar":
-            val = self.scalars.get(feature_details[0])
+            val = self.get_scalar(feature_details[0])
             if val is None:
                 raise KeyError(f"Unknown scalar {feature_details[0]}")
             return val
@@ -1840,7 +1878,7 @@ def get_feature_from_identifier(
             )
 
         if feature_type == "scalar":
-            return self.scalars.get(**feature_details)
+            return self.get_scalar(**feature_details)
         elif feature_type == "time_series":
             return self.get_time_series(**feature_details)
         elif feature_type == "field":
@@ -1883,7 +1921,7 @@ def get_features_from_identifiers(
         features = []
         for feature_type, feature_details in all_features_info:
             if feature_type == "scalar":
-                features.append(self.scalars.get(**feature_details))
+                features.append(self.get_scalar(**feature_details))
             elif feature_type == "time_series":
                 features.append(self.get_time_series(**feature_details))
             elif feature_type == "field":
@@ -1919,7 +1957,7 @@ def _add_feature(
         if feature_type == "scalar":
             if safe_len(feature) == 1:
                 feature = feature[0]
-            self.scalars.add(**feature_details, value=feature)
+            self.add_scalar(**feature_details, value=feature)
         elif feature_type == "time_series":
             self.add_time_series(
                 **feature_details, time_sequence=feature[0], values=feature[1]
@@ -2101,11 +2139,11 @@ def save(self, dir_path: Union[str, Path], overwrite: bool = False) -> None:
             )
             logger.debug(f"save -> {status=}")
 
-        scalars_names = self.scalars.get_names()
+        scalars_names = self.get_scalar_names()
         if len(scalars_names) > 0:
             scalars = []
             for s_name in scalars_names:
-                scalars.append(self.scalars.get(s_name))
+                scalars.append(self.get_scalar(s_name))
             scalars = np.array(scalars).reshape((1, -1))
             header = ",".join(scalars_names)
             np.savetxt(
@@ -2214,7 +2252,7 @@ def load(self, dir_path: Union[str, Path]) -> None:
                 scalars_fname, dtype=float, skiprows=1, delimiter=","
             ).reshape((-1,))
             for name, value in zip(names, scalars):
-                self.scalars.add(name, value)
+                self.add_scalar(name, value)
 
         time_series_files = list(dir_path.glob("time_series_*.csv"))
         for ts_fname in time_series_files:
@@ -2236,7 +2274,7 @@ def __str__(self) -> str:
         str_repr = "Sample("
 
         # scalars
-        nb_scalars = len(self.scalars.get_names())
+        nb_scalars = len(self.get_scalar_names())
         str_repr += f"{nb_scalars} scalar{'' if nb_scalars == 1 else 's'}, "
 
         # time series
@@ -2290,7 +2328,7 @@ def serialize_model(self):
             "mesh_base_name": self._mesh_base_name,
             "mesh_zone_name": self._mesh_zone_name,
             "meshes": self._meshes,
-            "scalars": self.scalars._scalars,
+            "scalars": self._scalars._scalars,
             "time_series": self._time_series,
             "links": self._links,
             "paths": self._paths,
diff --git a/src/plaid/post/bisect.py b/src/plaid/post/bisect.py
index b73e4378..4d195260 100644
--- a/src/plaid/post/bisect.py
+++ b/src/plaid/post/bisect.py
@@ -48,10 +48,10 @@ def prepare_datasets(
     for i_sample in tqdm(range(n_samples), disable=not (verbose)):
         for sname in out_scalars_names:
-            ref = ref_dataset[i_sample].scalars.get(sname)
+            ref = ref_dataset[i_sample].get_scalar(sname)
             ref_out_scalars[sname].append(ref)
 
-            pred = pred_dataset[i_sample].scalars.get(sname)
+            pred = pred_dataset[i_sample].get_scalar(sname)
             pred_out_scalars[sname].append(pred)
 
     return ref_out_scalars, pred_out_scalars, out_scalars_names
diff --git a/src/plaid/utils/init_with_tabular.py b/src/plaid/utils/init_with_tabular.py
index 48cf9838..ce766457 100644
--- a/src/plaid/utils/init_with_tabular.py
+++ b/src/plaid/utils/init_with_tabular.py
@@ -62,7 +62,7 @@ def initialize_dataset_with_tabular_data(
     for i in range(nb_samples):
         sample = Sample()
         for scalar_name, value in tabular_data.items():
-            sample.scalars.add(scalar_name, value[i])
+            sample.add_scalar(scalar_name, value[i])
         dataset.add_sample(sample)
 
     # TODO:
diff --git a/src/plaid/utils/stats.py b/src/plaid/utils/stats.py
index a1b2ed5b..d581f313 100644
--- a/src/plaid/utils/stats.py
+++ b/src/plaid/utils/stats.py
@@ -376,10 +376,10 @@ def _process_scalar_data(self, sample: Sample, data_dict: dict[str, list]) -> No
             sample (Sample): Sample containing scalar data
             data_dict (dict[str, list]): Dictionary to store processed data
         """
-        for name in sample.scalars.get_names():
+        for name in sample.get_scalar_names( ):
             if name not in data_dict:
                 data_dict[name] = []
-            value = sample.scalars.get(name)
+            value = sample.get_scalar(name)
             if value is not None:
                 data_dict[name].append(np.array(value).reshape((1, -1)))
diff --git a/tests/bridges/test_huggingface_bridge.py b/tests/bridges/test_huggingface_bridge.py
index 2422b21d..577426e3 100644
--- a/tests/bridges/test_huggingface_bridge.py
+++ b/tests/bridges/test_huggingface_bridge.py
@@ -70,7 +70,7 @@ def assert_plaid_dataset(self, ds, pbdef):
 
     def assert_sample(self, sample):
         assert isinstance(sample, Sample)
-        assert sample.scalars.get_names()[0] == "test_scalar"
+        assert sample.get_scalar_names( )[0] == "test_scalar"
         assert "test_field_same_size" in sample.get_field_names()
         assert sample.get_field("test_field_same_size").shape[0] == 17
diff --git a/tests/conftest.py b/tests/conftest.py
index ab372d29..c8fb9bc7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -29,8 +29,8 @@ def generate_samples(nb: int, zone_name: str, base_name: str) -> list[Sample]:
         sample = Sample()
         sample.init_base(3, 3, base_name)
         sample.init_zone(np.array([0, 0, 0]), zone_name=zone_name, base_name=base_name)
-        sample.scalars.add("test_scalar", float(i))
-        sample.scalars.add("test_scalar_2", float(i**2))
+        sample.add_scalar("test_scalar", float(i))
+        sample.add_scalar("test_scalar_2", float(i**2))
         sample.add_time_series(
             "test_time_series_1",
             np.arange(11, dtype=float),
a/tests/containers/test_dataset.py b/tests/containers/test_dataset.py index 6c06fc08..fcd3bfb9 100644 --- a/tests/containers/test_dataset.py +++ b/tests/containers/test_dataset.py @@ -31,7 +31,7 @@ def current_directory(): def compare_two_samples(sample_1: Sample, sample_2: Sample): assert set(sample_1.get_all_mesh_times()) == set(sample_2.get_all_mesh_times()) - assert set(sample_1.scalars.get_names()) == set(sample_2.scalars.get_names()) + assert set(sample_1.get_scalar_names( )) == set(sample_2.get_scalar_names( )) assert set(sample_1.get_field_names()) == set(sample_2.get_field_names()) assert set(sample_1.get_time_series_names()) == set( sample_2.get_time_series_names() diff --git a/tests/containers/test_sample.py b/tests/containers/test_sample.py index 5eb61d35..9d01dddf 100644 --- a/tests/containers/test_sample.py +++ b/tests/containers/test_sample.py @@ -49,7 +49,7 @@ def other_sample(): @pytest.fixture() def sample_with_scalar(sample): - sample.scalars.add("test_scalar_1", np.random.randn()) + sample.add_scalar("test_scalar_1", np.random.randn()) return sample @@ -105,8 +105,8 @@ def sample_with_tree3d(sample, tree3d): def sample_with_tree_and_scalar_and_time_series( sample_with_tree, ): - sample_with_tree.scalars.add("r", np.random.randn()) - sample_with_tree.scalars.add("test_scalar_1", np.random.randn()) + sample_with_tree.add_scalar("r", np.random.randn()) + sample_with_tree.add_scalar("test_scalar_1", np.random.randn()) sample_with_tree.add_time_series( "test_time_series_1", np.arange(111, dtype=float), np.random.randn(111) ) @@ -596,43 +596,43 @@ def test_get_zone(self, sample, zone_name, base_name): # -------------------------------------------------------------------------# def test_get_scalar_names(self, sample): - assert sample.scalars.get_names() == [] + assert sample.get_scalar_names( ) == [] def test_get_scalar_empty(self, sample): - assert sample.scalars.get("missing_scalar_name") is None + assert sample.get_scalar("missing_scalar_name") is None def test_get_scalar(self, sample_with_scalar): - assert sample_with_scalar.scalars.get("missing_scalar_name") is None - assert sample_with_scalar.scalars.get("test_scalar_1") is not None + assert sample_with_scalar.get_scalar("missing_scalar_name") is None + assert sample_with_scalar.get_scalar("test_scalar_1") is not None def test_scalars_add_empty(self, sample_with_scalar): - assert isinstance(sample_with_scalar.scalars.get("test_scalar_1"), float) + assert isinstance(sample_with_scalar.get_scalar("test_scalar_1"), float) def test_scalars_add(self, sample_with_scalar): - sample_with_scalar.scalars.add("test_scalar_2", np.random.randn()) + sample_with_scalar.add_scalar("test_scalar_2", np.random.randn()) def test_del_scalar_unknown_scalar(self, sample_with_scalar): with pytest.raises(KeyError): - sample_with_scalar.scalars.remove("non_existent_scalar") + sample_with_scalar.del_scalar("non_existent_scalar") def test_del_scalar_no_scalar(self): sample = Sample() with pytest.raises(KeyError): - sample.scalars.remove("non_existent_scalar") + sample.del_scalar("non_existent_scalar") def test_del_scalar(self, sample_with_scalar): - assert len(sample_with_scalar.scalars.get_names()) == 1 + assert len(sample_with_scalar.get_scalar_names( )) == 1 - sample_with_scalar.scalars.add("test_scalar_2", np.random.randn(5)) - assert len(sample_with_scalar.scalars.get_names()) == 2 + sample_with_scalar.add_scalar("test_scalar_2", np.random.randn(5)) + assert len(sample_with_scalar.get_scalar_names( )) == 2 - scalar = 
-        scalar = sample_with_scalar.scalars.remove("test_scalar_1")
-        assert len(sample_with_scalar.scalars.get_names()) == 1
+        scalar = sample_with_scalar.del_scalar("test_scalar_1")
+        assert len(sample_with_scalar.get_scalar_names( )) == 1
         assert scalar is not None
         assert isinstance(scalar, float)
 
-        scalar = sample_with_scalar.scalars.remove("test_scalar_2")
-        assert len(sample_with_scalar.scalars.get_names()) == 0
+        scalar = sample_with_scalar.del_scalar("test_scalar_2")
+        assert len(sample_with_scalar.get_scalar_names( )) == 0
         assert scalar is not None
         assert isinstance(scalar, np.ndarray)
@@ -1045,7 +1045,7 @@ def test_get_features_from_identifiers(
     def test_update_features_from_identifier(
         self, sample_with_tree_and_scalar_and_time_series
     ):
-        before = sample_with_tree_and_scalar_and_time_series.scalars.get("test_scalar_1")
+        before = sample_with_tree_and_scalar_and_time_series.get_scalar("test_scalar_1")
         sample_ = (
             sample_with_tree_and_scalar_and_time_series.update_features_from_identifier(
                 feature_identifiers={"type": "scalar", "name": "test_scalar_1"},
@@ -1053,7 +1053,7 @@
                 in_place=False,
             )
         )
-        after = sample_.scalars.get("test_scalar_1")
+        after = sample_.get_scalar("test_scalar_1")
         assert after != before
 
         before = sample_with_tree_and_scalar_and_time_series.get_time_series(
@@ -1161,14 +1161,14 @@ def test_from_features_identifier(
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "scalar", "name": "test_scalar_1"},
         )
-        assert sample_.scalars.get_names() == ["test_scalar_1"]
+        assert sample_.get_scalar_names( ) == ["test_scalar_1"]
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
 
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "time_series", "name": "test_time_series_1"},
         )
-        assert len(sample_.scalars.get_names()) == 0
+        assert len(sample_.get_scalar_names( )) == 0
         assert sample_.get_time_series_names() == ["test_time_series_1"]
         assert len(sample_.get_field_names()) == 0
@@ -1182,7 +1182,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.scalars.get_names()) == 0
+        assert len(sample_.get_scalar_names( )) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
@@ -1194,7 +1194,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.scalars.get_names()) == 0
+        assert len(sample_.get_scalar_names( )) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
@@ -1204,7 +1204,7 @@ def test_from_features_identifier(
                 {"type": "nodes"},
             ],
         )
-        assert len(sample_.scalars.get_names()) == 0
+        assert len(sample_.get_scalar_names( )) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
diff --git a/tests/utils/test_init_with_tabular.py b/tests/utils/test_init_with_tabular.py
index 2af9c734..717d391d 100644
--- a/tests/utils/test_init_with_tabular.py
+++ b/tests/utils/test_init_with_tabular.py
@@ -52,7 +52,7 @@ def test_initialize_dataset_with_tabular_data(
         assert len(dataset) == nb_samples
 
         sample_1 = dataset[1]
-        scalar_value = sample_1.scalars.get("scalar_name_1")
+        scalar_value = sample_1.get_scalar("scalar_name_1")
         assert isinstance(scalar_value, float)
 
     def test_initialize_dataset_with_quantity_tabular_data(
diff --git a/tests/utils/test_stats.py b/tests/utils/test_stats.py
index 341142ca..e1da8116 100644
--- a/tests/utils/test_stats.py
+++ b/tests/utils/test_stats.py
@@ -59,7 +59,7 @@ def stats():
 
 @pytest.fixture()
 def sample_with_scalar(np_samples_3):
     s = Sample()
-    s.scalars.add("foo", float(np_samples_3.mean()))
+    s.add_scalar("foo", float(np_samples_3.mean()))
     return s
 
 
@@ -245,7 +245,7 @@ def test_get_stats(self, stats, samples):
         stats_dict = stats.get_stats()
 
         sample = samples[0]
-        feature_names = sample.scalars.get_names()
+        feature_names = sample.get_scalar_names( )
         feature_names.extend(
             item
             for ts_name in sample.get_time_series_names()
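
Note on the call-site migration above: every consumer moves from the nested
manager (`sample.scalars.*`) to flat methods on `Sample`. A minimal sketch of
the old-to-new mapping, using only methods exercised by the tests in this
patch (illustrative, not part of the series):

    from plaid.containers.sample import Sample

    sample = Sample()
    sample.add_scalar("test_scalar_1", 1.0)       # was: sample.scalars.add(...)
    value = sample.get_scalar("test_scalar_1")    # was: sample.scalars.get(...)
    names = sample.get_scalar_names()             # was: sample.scalars.get_names()
    removed = sample.del_scalar("test_scalar_1")  # was: sample.scalars.remove(...)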

From 0cafa01b3417b31ab21db7cd205651ab17683fcf Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:15:10 +0200
Subject: [PATCH 06/13] :recycle: All clean

---
 src/plaid/containers/collections.py      | 12 ++++++------
 src/plaid/containers/dataset.py          |  2 +-
 src/plaid/containers/sample.py           |  2 +-
 src/plaid/utils/stats.py                 |  2 +-
 tests/bridges/test_huggingface_bridge.py |  2 +-
 tests/containers/test_dataset.py         |  2 +-
 tests/containers/test_sample.py          | 20 ++++++++++----------
 tests/utils/test_stats.py                |  2 +-
 8 files changed, 22 insertions(+), 22 deletions(-)

diff --git a/src/plaid/containers/collections.py b/src/plaid/containers/collections.py
index bf1d76d8..86f58053 100644
--- a/src/plaid/containers/collections.py
+++ b/src/plaid/containers/collections.py
@@ -38,7 +38,7 @@ class SampleScalars:
     """
 
     def __init__(self, scalars: Optional[dict[str, Scalar]]) -> None:
-        self._scalars: dict[str, Scalar] = scalars if scalars is not None else {}
+        self.data: dict[str, Scalar] = scalars if scalars is not None else {}
 
     def add(self, name: str, value: Scalar) -> None:
         """Add a scalar value to a dictionary.
@@ -48,7 +48,7 @@ def add(self, name: str, value: Scalar) -> None:
             value (Scalar): The scalar value to add or update in the dictionary.
         """
         _check_names(name)
-        self._scalars[name] = value
+        self.data[name] = value
 
     def remove(self, name: str) -> Scalar:
         """Delete a scalar value from the dictionary.
@@ -62,10 +62,10 @@ def remove(self, name: str) -> Scalar:
         Returns:
             Scalar: The value of the deleted scalar.
         """
-        if name not in self._scalars:
+        if name not in self.data:
             raise KeyError(f"There is no scalar value with name {name}.")
 
-        return self._scalars.pop(name)
+        return self.data.pop(name)
 
     def get(self, name: str) -> Scalar | None:
         """Retrieve a scalar value associated with the given name.
@@ -76,7 +76,7 @@ def get(self, name: str) -> Scalar | None:
         Returns:
             Scalar or None: The scalar value associated with the given name, or None if the name is not found.
         """
-        return self._scalars.get(name)
+        return self.data.get(name)
 
     def get_names(self) -> list[str]:
         """Get a set of scalar names available in the object.
 
         Returns:
             list[str]: A set containing the names of the available scalars.
         """
-        return sorted(self._scalars.keys())
+        return sorted(self.data.keys())
""" - return sorted(self._scalars.keys()) + return sorted(self.data.keys()) diff --git a/src/plaid/containers/dataset.py b/src/plaid/containers/dataset.py index 41be75a3..0fdcaf90 100644 --- a/src/plaid/containers/dataset.py +++ b/src/plaid/containers/dataset.py @@ -365,7 +365,7 @@ def get_scalar_names(self, ids: Optional[list[int]] = None) -> list[str]: scalars_names = [] for sample in self.get_samples(ids, as_list=True): - s_names = sample.get_scalar_names( ) + s_names = sample.get_scalar_names() for s_name in s_names: if s_name not in scalars_names: scalars_names.append(s_name) diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py index 23506bf1..ca7be916 100644 --- a/src/plaid/containers/sample.py +++ b/src/plaid/containers/sample.py @@ -2328,7 +2328,7 @@ def serialize_model(self): "mesh_base_name": self._mesh_base_name, "mesh_zone_name": self._mesh_zone_name, "meshes": self._meshes, - "scalars": self._scalars._scalars, + "scalars": self._scalars.data, "time_series": self._time_series, "links": self._links, "paths": self._paths, diff --git a/src/plaid/utils/stats.py b/src/plaid/utils/stats.py index d581f313..56ddc626 100644 --- a/src/plaid/utils/stats.py +++ b/src/plaid/utils/stats.py @@ -376,7 +376,7 @@ def _process_scalar_data(self, sample: Sample, data_dict: dict[str, list]) -> No sample (Sample): Sample containing scalar data data_dict (dict[str, list]): Dictionary to store processed data """ - for name in sample.get_scalar_names( ): + for name in sample.get_scalar_names(): if name not in data_dict: data_dict[name] = [] value = sample.get_scalar(name) diff --git a/tests/bridges/test_huggingface_bridge.py b/tests/bridges/test_huggingface_bridge.py index 577426e3..d54161cd 100644 --- a/tests/bridges/test_huggingface_bridge.py +++ b/tests/bridges/test_huggingface_bridge.py @@ -70,7 +70,7 @@ def assert_plaid_dataset(self, ds, pbdef): def assert_sample(self, sample): assert isinstance(sample, Sample) - assert sample.get_scalar_names( )[0] == "test_scalar" + assert sample.get_scalar_names()[0] == "test_scalar" assert "test_field_same_size" in sample.get_field_names() assert sample.get_field("test_field_same_size").shape[0] == 17 diff --git a/tests/containers/test_dataset.py b/tests/containers/test_dataset.py index fcd3bfb9..8930509e 100644 --- a/tests/containers/test_dataset.py +++ b/tests/containers/test_dataset.py @@ -31,7 +31,7 @@ def current_directory(): def compare_two_samples(sample_1: Sample, sample_2: Sample): assert set(sample_1.get_all_mesh_times()) == set(sample_2.get_all_mesh_times()) - assert set(sample_1.get_scalar_names( )) == set(sample_2.get_scalar_names( )) + assert set(sample_1.get_scalar_names()) == set(sample_2.get_scalar_names()) assert set(sample_1.get_field_names()) == set(sample_2.get_field_names()) assert set(sample_1.get_time_series_names()) == set( sample_2.get_time_series_names() diff --git a/tests/containers/test_sample.py b/tests/containers/test_sample.py index 9d01dddf..7a2fe4da 100644 --- a/tests/containers/test_sample.py +++ b/tests/containers/test_sample.py @@ -596,7 +596,7 @@ def test_get_zone(self, sample, zone_name, base_name): # -------------------------------------------------------------------------# def test_get_scalar_names(self, sample): - assert sample.get_scalar_names( ) == [] + assert sample.get_scalar_names() == [] def test_get_scalar_empty(self, sample): assert sample.get_scalar("missing_scalar_name") is None @@ -621,18 +621,18 @@ def test_del_scalar_no_scalar(self): sample.del_scalar("non_existent_scalar") def 
     def test_del_scalar(self, sample_with_scalar):
-        assert len(sample_with_scalar.get_scalar_names( )) == 1
+        assert len(sample_with_scalar.get_scalar_names()) == 1
 
         sample_with_scalar.add_scalar("test_scalar_2", np.random.randn(5))
-        assert len(sample_with_scalar.get_scalar_names( )) == 2
+        assert len(sample_with_scalar.get_scalar_names()) == 2
 
         scalar = sample_with_scalar.del_scalar("test_scalar_1")
-        assert len(sample_with_scalar.get_scalar_names( )) == 1
+        assert len(sample_with_scalar.get_scalar_names()) == 1
         assert scalar is not None
         assert isinstance(scalar, float)
 
         scalar = sample_with_scalar.del_scalar("test_scalar_2")
-        assert len(sample_with_scalar.get_scalar_names( )) == 0
+        assert len(sample_with_scalar.get_scalar_names()) == 0
         assert scalar is not None
         assert isinstance(scalar, np.ndarray)
@@ -1161,14 +1161,14 @@ def test_from_features_identifier(
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "scalar", "name": "test_scalar_1"},
         )
-        assert sample_.get_scalar_names( ) == ["test_scalar_1"]
+        assert sample_.get_scalar_names() == ["test_scalar_1"]
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
 
         sample_ = sample_with_tree_and_scalar_and_time_series.from_features_identifier(
             feature_identifiers={"type": "time_series", "name": "test_time_series_1"},
         )
-        assert len(sample_.get_scalar_names( )) == 0
+        assert len(sample_.get_scalar_names()) == 0
         assert sample_.get_time_series_names() == ["test_time_series_1"]
         assert len(sample_.get_field_names()) == 0
@@ -1182,7 +1182,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.get_scalar_names( )) == 0
+        assert len(sample_.get_scalar_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
@@ -1194,7 +1194,7 @@ def test_from_features_identifier(
                 "time": 0.0,
             },
         )
-        assert len(sample_.get_scalar_names( )) == 0
+        assert len(sample_.get_scalar_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert len(sample_.get_field_names()) == 0
@@ -1204,7 +1204,7 @@ def test_from_features_identifier(
                 {"type": "nodes"},
             ],
         )
-        assert len(sample_.get_scalar_names( )) == 0
+        assert len(sample_.get_scalar_names()) == 0
         assert len(sample_.get_time_series_names()) == 0
         assert sample_.get_field_names() == ["test_node_field_1"]
diff --git a/tests/utils/test_stats.py b/tests/utils/test_stats.py
index e1da8116..f8d791ae 100644
--- a/tests/utils/test_stats.py
+++ b/tests/utils/test_stats.py
@@ -245,7 +245,7 @@ def test_get_stats(self, stats, samples):
         stats_dict = stats.get_stats()
 
         sample = samples[0]
-        feature_names = sample.get_scalar_names( )
+        feature_names = sample.get_scalar_names()
         feature_names.extend(
             item
             for ts_name in sample.get_time_series_names()

From 3583b182672c469e6d4609f6b5bb57d6da027b25 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:18:10 +0200
Subject: [PATCH 07/13] Unsupported | py3.9

---
 src/plaid/containers/collections.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/plaid/containers/collections.py b/src/plaid/containers/collections.py
index 86f58053..fada21fe 100644
--- a/src/plaid/containers/collections.py
+++ b/src/plaid/containers/collections.py
@@ -67,7 +67,7 @@ def remove(self, name: str) -> Scalar:
 
         return self.data.pop(name)
 
-    def get(self, name: str) -> Scalar | None:
+    def get(self, name: str) -> Optional[Scalar]:
         """Retrieve a scalar value associated with the given name.
 
         Args:

From d283d57b9d356f01a87f874d058fa245f256b9df Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:23:26 +0200
Subject: [PATCH 08/13] Unsupported symbol python 3.9

---
 src/plaid/containers/sample.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py
index ca7be916..d590912d 100644
--- a/src/plaid/containers/sample.py
+++ b/src/plaid/containers/sample.py
@@ -185,12 +185,12 @@ def __init__(
         self._mesh_base_name: str = mesh_base_name
         self._mesh_zone_name: str = mesh_zone_name
 
-        self._meshes: dict[float, CGNSTree] | None = meshes
+        self._meshes: Optional[dict[float, CGNSTree]] = meshes
         self._scalars = SampleScalars(scalars)
-        self._time_series: dict[str, TimeSeries] | None = time_series
+        self._time_series: Optional[dict[str, TimeSeries]] = time_series
 
-        self._links: dict[float, list[LinkType]] | None = links
-        self._paths: dict[float, list[PathType]] | None = paths
+        self._links: Optional[dict[float, list[LinkType]]] = links
+        self._paths: Optional[dict[float, list[PathType]]] = paths
 
         if directory_path is not None:
             directory_path = Path(directory_path)

From fb98e510f5dd48024f6c306672bc80232b847337 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:27:06 +0200
Subject: [PATCH 09/13] Unsupported symbol python 3.9

---
 src/plaid/containers/sample.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py
index d590912d..90e0ed7e 100644
--- a/src/plaid/containers/sample.py
+++ b/src/plaid/containers/sample.py
@@ -204,7 +204,7 @@ def __init__(
 
         self._extra_data = None
 
-    def get_scalar(self, name: str) -> Scalar | None:
+    def get_scalar(self, name: str) -> Optional[Scalar]:
         """Retrieve a scalar value associated with the given name.
 
         Args:
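
Note on patches 07-09: all three fix the same Python 3.9 incompatibility. A
PEP 604 union in an annotation (`Scalar | None`) is evaluated when the `def`
executes, and the `|` operator on types only exists from Python 3.10 onward,
so importing the module fails on 3.9; `typing.Optional` spells the same type
on both versions. A minimal reproduction (illustrative only):

    from typing import Optional

    # def get(name: str) -> float | None: ...
    #   -> TypeError on Python 3.9: unsupported operand type(s) for |
    def get(name: str) -> Optional[float]:  # accepted on 3.9 and later
        ...

`from __future__ import annotations` would also defer the evaluation, but
swapping in `Optional` keeps the annotations introspectable at runtime.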

From 7b7eae6c3cb2212b490e8802b877fb4e025e44a5 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:34:56 +0200
Subject: [PATCH 10/13] Fix examples

---
 docs/source/notebooks/dataset.ipynb           | 14 +++++++-------
 docs/source/notebooks/init_with_tabular.ipynb |  2 +-
 examples/containers/dataset_example.py        | 14 +++++++-------
 examples/utils/init_with_tabular_example.py   |  2 +-
 tests/utils/test_init_with_tabular.py         |  2 +-
 5 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/docs/source/notebooks/dataset.ipynb b/docs/source/notebooks/dataset.ipynb
index 6593f6c5..bb6243b0 100644
--- a/docs/source/notebooks/dataset.ipynb
+++ b/docs/source/notebooks/dataset.ipynb
@@ -455,12 +455,12 @@
     "# Print scalars in tabular format\n",
     "print(f\"{dataset.get_scalar_names( ) = }\", end=\"\\n\\n\")\n",
     "\n",
-    "dprint(\"get rotation scalar = \", dataset.scalars.gets_to_tabular([\"rotation\"]))\n",
-    "dprint(\"get speed scalar = \", dataset.scalars.gets_to_tabular([\"speed\"]), end=\"\\n\\n\")\n",
+    "dprint(\"get rotation scalar = \", dataset.get_scalars_to_tabular([\"rotation\"]))\n",
+    "dprint(\"get speed scalar = \", dataset.get_scalars_to_tabular([\"speed\"]), end=\"\\n\\n\")\n",
     "\n",
     "# Get specific scalars in tabular format\n",
-    "dprint(\"get specific scalars =\", dataset.scalars.gets_to_tabular([\"speed\", \"rotation\"]))\n",
-    "dprint(\"get all scalars =\", dataset.scalars.gets_to_tabular())"
+    "dprint(\"get specific scalars =\", dataset.get_scalars_to_tabular([\"speed\", \"rotation\"]))\n",
+    "dprint(\"get all scalars =\", dataset.get_scalars_to_tabular())"
    ]
   },
   {
@@ -470,7 +470,7 @@
    "outputs": [],
    "source": [
     "# Get specific scalars np.array\n",
-    "print(\"get all scalar arrays = \", dataset.scalars.gets_to_tabular(as_nparray=True))"
+    "print(\"get all scalar arrays = \", dataset.get_scalars_to_tabular(as_nparray=True))"
    ]
   },
   {
@@ -545,7 +545,7 @@
    "dataset.merge_dataset(other_dataset)\n",
    "print(f\"after merge: {dataset = }\", end=\"\\n\\n\")\n",
    "\n",
-    "dprint(\"dataset scalars = \", dataset.scalars.gets_to_tabular())"
+    "dprint(\"dataset scalars = \", dataset.get_scalars_to_tabular())"
    ]
   },
   {
@@ -566,7 +566,7 @@
    "dataset.add_tabular_scalars(new_scalars, names=[\"Tu\", \"random_name\"])\n",
    "\n",
    "print(f\"{dataset = }\")\n",
-    "dprint(\"dataset scalars =\", dataset.scalars.gets_to_tabular())"
+    "dprint(\"dataset scalars =\", dataset.get_scalars_to_tabular())"
    ]
   },
   {
diff --git a/docs/source/notebooks/init_with_tabular.ipynb b/docs/source/notebooks/init_with_tabular.ipynb
index 7da205d9..6ee0f742 100644
--- a/docs/source/notebooks/init_with_tabular.ipynb
+++ b/docs/source/notebooks/init_with_tabular.ipynb
@@ -126,7 +126,7 @@
    "source": [
     "# Retrieve tabular data from the dataset based on scalar names\n",
     "scalar_names = [\"scalar_1\", \"scalar_3\", \"scalar_5\"]\n",
-    "tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names)\n",
+    "tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names)\n",
     "print(\"Tabular Data Subset for Scalars 1, 3, and 5:\")\n",
     "dprint(\"tabular_data_subset\", tabular_data_subset)"
    ]
diff --git a/examples/containers/dataset_example.py b/examples/containers/dataset_example.py
index ddab819b..9dd3eda9 100644
--- a/examples/containers/dataset_example.py
+++ b/examples/containers/dataset_example.py
@@ -255,16 +255,16 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 # Print scalars in tabular format
 print(f"{dataset.get_scalar_names( ) = }", end="\n\n")
 
-dprint("get rotation scalar = ", dataset.scalars.gets_to_tabular(["rotation"]))
+dprint("get rotation scalar = ", dataset.get_scalars_to_tabular(["rotation"]))
-dprint("get speed scalar = ", dataset.scalars.gets_to_tabular(["speed"]), end="\n\n")
+dprint("get speed scalar = ", dataset.get_scalars_to_tabular(["speed"]), end="\n\n")
 
 # Get specific scalars in tabular format
-dprint("get specific scalars =", dataset.scalars.gets_to_tabular(["speed", "rotation"]))
-dprint("get all scalars =", dataset.scalars.gets_to_tabular())
+dprint("get specific scalars =", dataset.get_scalars_to_tabular(["speed", "rotation"]))
+dprint("get all scalars =", dataset.get_scalars_to_tabular())
 
 # %%
 # Get specific scalars np.array
-print("get all scalar arrays =", dataset.scalars.gets_to_tabular(as_nparray=True))
+print("get all scalar arrays =", dataset.get_scalars_to_tabular(as_nparray=True))
 
 # %% [markdown]
 # ### Get Dataset fields
@@ -305,7 +305,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 dataset.merge_dataset(other_dataset)
 print(f"after merge: {dataset = }", end="\n\n")
 
-dprint("dataset scalars = ", dataset.scalars.gets_to_tabular())
+dprint("dataset scalars = ", dataset.get_scalars_to_tabular())
 
 # %% [markdown]
 # ### Add tabular scalars to a Dataset
@@ -316,7 +316,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 dataset.add_tabular_scalars(new_scalars, names=["Tu", "random_name"])
 
 print(f"{dataset = }")
-dprint("dataset scalars =", dataset.scalars.gets_to_tabular())
+dprint("dataset scalars =", dataset.get_scalars_to_tabular())
 
 # %% [markdown]
 # ### Set additional information to a dataset
diff --git a/examples/utils/init_with_tabular_example.py b/examples/utils/init_with_tabular_example.py
index cc5986e5..6070fa07 100644
--- a/examples/utils/init_with_tabular_example.py
+++ b/examples/utils/init_with_tabular_example.py
@@ -68,6 +68,6 @@ def dprint(name: str, dictio: dict):
 # %%
 # Retrieve tabular data from the dataset based on scalar names
 scalar_names = ["scalar_1", "scalar_3", "scalar_5"]
-tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names)
+tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names)
 print("Tabular Data Subset for Scalars 1, 3, and 5:")
 dprint("tabular_data_subset", tabular_data_subset)
diff --git a/tests/utils/test_init_with_tabular.py b/tests/utils/test_init_with_tabular.py
index 717d391d..1a5bcfc7 100644
--- a/tests/utils/test_init_with_tabular.py
+++ b/tests/utils/test_init_with_tabular.py
@@ -62,5 +62,5 @@ def test_initialize_dataset_with_quantity_tabular_data(
         assert len(dataset) == nb_samples
 
         # scalar_names = ["test_scalar", "test_1D_field", "test_2D_field"]
-        # tabular_data_subset = dataset.scalars.gets_to_tabular(scalar_names)
+        # tabular_data_subset = dataset.get_scalars_to_tabular(scalar_names)
        # assert isinstance(tabular_data_subset, dict)

From 1f19f94568c5a5d552fa2fadb2e143efd5189f85 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Wed, 27 Aug 2025 09:42:22 +0200
Subject: [PATCH 11/13] pragma

---
 src/plaid/containers/sample.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py
index 90e0ed7e..f0f46ccb 100644
--- a/src/plaid/containers/sample.py
+++ b/src/plaid/containers/sample.py
@@ -1831,7 +1831,9 @@ def get_feature_from_string_identifier(
         if feature_type == "scalar":
             val = self.get_scalar(feature_details[0])
             if val is None:
-                raise KeyError(f"Unknown scalar {feature_details[0]}")
+                raise KeyError(
+                    f"Unknown scalar {feature_details[0]}"
+                )  # pragma: no cover
             return val
         elif feature_type == "time_series":
             return self.get_time_series(feature_details[0])
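
Note on patch 11: `# pragma: no cover` is coverage.py's default exclusion
marker, so the defensive `raise` is left out of the coverage report; the
commit rewraps the statement so the marker fits on the closing line. A
generic sketch of the mechanism, with hypothetical names:

    # Lines matching coverage.py's default exclusion regex
    # ("# pragma: no cover") are omitted from the report.
    def lookup(table: dict, key: str) -> float:
        if key not in table:
            raise KeyError(f"Unknown key {key}")  # pragma: no cover
        return table[key]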

From 0641ee7b5e9e42e5834ff15027331aa5417007f2 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Thu, 28 Aug 2025 11:22:41 +0200
Subject: [PATCH 12/13] sync with main changes

---
 src/plaid/containers/sample.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/plaid/containers/sample.py b/src/plaid/containers/sample.py
index 1e96e690..39e42df0 100644
--- a/src/plaid/containers/sample.py
+++ b/src/plaid/containers/sample.py
@@ -147,8 +147,8 @@ def __init__(
         meshes: Optional[dict[float, CGNSTree]] = None,
         scalars: Optional[dict[str, Scalar]] = None,
         time_series: Optional[dict[str, TimeSeries]] = None,
-        links: Optional[dict[float, list[LinkType]]] = None,
-        paths: Optional[dict[float, list[PathType]]] = None,
+        links: Optional[dict[float, list[CGNSLink]]] = None,
+        paths: Optional[dict[float, list[CGNSPath]]] = None,
     ) -> None:
         """Initialize an empty :class:`Sample <plaid.containers.sample.Sample>`.
@@ -189,8 +189,8 @@ def __init__(
         self._scalars = SampleScalars(scalars)
         self._time_series: Optional[dict[str, TimeSeries]] = time_series
 
-        self._links: Optional[dict[float, list[LinkType]]] = links
-        self._paths: Optional[dict[float, list[PathType]]] = paths
+        self._links: Optional[dict[float, list[CGNSLink]]] = links
+        self._paths: Optional[dict[float, list[CGNSPath]]] = paths
 
         if directory_path is not None:
             directory_path = Path(directory_path)

From 266d72782d7efc8f6eed521767a05ba0b0c0cbb3 Mon Sep 17 00:00:00 2001
From: Brian Staber
Date: Thu, 28 Aug 2025 11:33:57 +0200
Subject: [PATCH 13/13] remove dirty space between brackets

---
 docs/source/notebooks/dataset.ipynb     | 10 +++++-----
 docs/source/notebooks/huggingface.ipynb |  2 +-
 docs/source/notebooks/pipeline.ipynb    |  4 ++--
 docs/source/notebooks/sample.ipynb      |  4 ++--
 examples/containers/dataset_example.py  | 10 +++++-----
 examples/containers/sample_example.py   |  4 ++--
 examples/pipelines/pipeline.py          |  4 ++--
 7 files changed, 19 insertions(+), 19 deletions(-)

diff --git a/docs/source/notebooks/dataset.ipynb b/docs/source/notebooks/dataset.ipynb
index bb6243b0..463efb4a 100644
--- a/docs/source/notebooks/dataset.ipynb
+++ b/docs/source/notebooks/dataset.ipynb
@@ -237,7 +237,7 @@
    "print(f\"{sample_03 = }\", end=\"\\n\\n\")\n",
    "\n",
    "# Print sample scalar data\n",
-    "print(f\"{sample_03.get_scalar_names( ) = }\")\n",
+    "print(f\"{sample_03.get_scalar_names() = }\")\n",
    "print(f\"{sample_03.get_scalar('speed') = }\")\n",
    "print(f\"{sample_03.get_scalar('rotation') = }\", end=\"\\n\\n\")\n",
@@ -422,9 +422,9 @@
    "print(f\"{dataset[1] = }\") # getitem strategy\n",
    "print(f\"{dataset[2] = }\", end=\"\\n\\n\")\n",
    "\n",
-    "print(\"scalar of the first sample = \", dataset[0].get_scalar_names( ))\n",
-    "print(\"scalar of the second sample = \", dataset[1].get_scalar_names( ))\n",
-    "print(\"scalar of the third sample = \", dataset[2].get_scalar_names( ))"
+    "print(\"scalar of the first sample = \", dataset[0].get_scalar_names())\n",
+    "print(\"scalar of the second sample = \", dataset[1].get_scalar_names())\n",
+    "print(\"scalar of the third sample = \", dataset[2].get_scalar_names())"
    ]
   },
   {
@@ -453,7 +453,7 @@
    "outputs": [],
    "source": [
     "# Print scalars in tabular format\n",
-    "print(f\"{dataset.get_scalar_names( ) = }\", end=\"\\n\\n\")\n",
+    "print(f\"{dataset.get_scalar_names() = }\", end=\"\\n\\n\")\n",
     "\n",
     "dprint(\"get rotation scalar = \", dataset.get_scalars_to_tabular([\"rotation\"]))\n",
end=\"\\n\\n\")\n", diff --git a/docs/source/notebooks/huggingface.ipynb b/docs/source/notebooks/huggingface.ipynb index 8b64dc63..1965a535 100644 --- a/docs/source/notebooks/huggingface.ipynb +++ b/docs/source/notebooks/huggingface.ipynb @@ -47,7 +47,7 @@ "def show_sample(sample: Sample):\n", " print(f\"sample = {sample}\")\n", " sample.show_tree()\n", - " print(f\"{sample.get_scalar_names( ) = }\")\n", + " print(f\"{sample.get_scalar_names() = }\")\n", " print(f\"{sample.get_field_names() = }\")" ] }, diff --git a/docs/source/notebooks/pipeline.ipynb b/docs/source/notebooks/pipeline.ipynb index 540496d5..9670e6ab 100644 --- a/docs/source/notebooks/pipeline.ipynb +++ b/docs/source/notebooks/pipeline.ipynb @@ -164,7 +164,7 @@ "source": [ "dataset_train = dataset_train.from_features_identifier(all_feature_id)\n", "print(\"dataset_train:\", dataset_train)\n", - "print(\"scalar names =\", dataset_train.get_scalar_names( ))\n", + "print(\"scalar names =\", dataset_train.get_scalar_names())\n", "print(\"field names =\", dataset_train.get_field_names())" ] }, @@ -216,7 +216,7 @@ "source": [ "preprocessed_dataset = preprocessor.fit_transform(dataset_train)\n", "print(\"preprocessed_dataset:\", preprocessed_dataset)\n", - "print(\"scalar names =\", preprocessed_dataset.get_scalar_names( ))\n", + "print(\"scalar names =\", preprocessed_dataset.get_scalar_names())\n", "print(\"field names =\", preprocessed_dataset.get_field_names())" ] }, diff --git a/docs/source/notebooks/sample.ipynb b/docs/source/notebooks/sample.ipynb index e9d8b5e6..2d0f421f 100644 --- a/docs/source/notebooks/sample.ipynb +++ b/docs/source/notebooks/sample.ipynb @@ -54,7 +54,7 @@ "def show_sample(sample: Sample):\n", " print(f\"sample = {sample}\")\n", " sample.show_tree()\n", - " print(f\"{sample.get_scalar_names( ) = }\")\n", + " print(f\"{sample.get_scalar_names() = }\")\n", " print(f\"{sample.get_field_names() = }\")" ] }, @@ -402,7 +402,7 @@ "outputs": [], "source": [ "# It will look for a default base if no base and zone are given\n", - "print(f\"{sample.get_scalar_names( ) = }\")\n", + "print(f\"{sample.get_scalar_names() = }\")\n", "print(f\"{sample.get_scalar('omega') = }\")\n", "print(f\"{sample.get_scalar('rotation') = }\")" ] diff --git a/examples/containers/dataset_example.py b/examples/containers/dataset_example.py index 9dd3eda9..0e56b7c9 100644 --- a/examples/containers/dataset_example.py +++ b/examples/containers/dataset_example.py @@ -137,7 +137,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): print(f"{sample_03 = }", end="\n\n") # Print sample scalar data -print(f"{sample_03.get_scalar_names( ) = }") +print(f"{sample_03.get_scalar_names() = }") print(f"{sample_03.get_scalar('speed') = }") print(f"{sample_03.get_scalar('rotation') = }", end="\n\n") @@ -238,9 +238,9 @@ def dprint(name: str, dictio: dict, end: str = "\n"): print(f"{dataset[1] = }") # getitem strategy print(f"{dataset[2] = }", end="\n\n") -print("scalar of the first sample = ", dataset[0].get_scalar_names( )) -print("scalar of the second sample = ", dataset[1].get_scalar_names( )) -print("scalar of the third sample = ", dataset[2].get_scalar_names( )) +print("scalar of the first sample = ", dataset[0].get_scalar_names()) +print("scalar of the second sample = ", dataset[1].get_scalar_names()) +print("scalar of the third sample = ", dataset[2].get_scalar_names()) # %% # Access dataset information @@ -253,7 +253,7 @@ def dprint(name: str, dictio: dict, end: str = "\n"): # %% # Print scalars in tabular format 
-print(f"{dataset.get_scalar_names( ) = }", end="\n\n") +print(f"{dataset.get_scalar_names() = }", end="\n\n") dprint("get rotation scalar = ", dataset.get_scalars_to_tabular(["rotation"])) dprint("get speed scalar = ", dataset.get_scalars_to_tabular(["speed"]), end="\n\n") diff --git a/examples/containers/sample_example.py b/examples/containers/sample_example.py index f8b16820..76db906c 100644 --- a/examples/containers/sample_example.py +++ b/examples/containers/sample_example.py @@ -32,7 +32,7 @@ def show_sample(sample: Sample): print(f"{sample = }") sample.show_tree() - print(f"{sample.get_scalar_names( ) = }") + print(f"{sample.get_scalar_names() = }") print(f"{sample.get_field_names() = }") @@ -223,7 +223,7 @@ def show_sample(sample: Sample): # %% # It will look for a default base if no base and zone are given -print(f"{sample.get_scalar_names( ) = }") +print(f"{sample.get_scalar_names() = }") print(f"{sample.get_scalar('omega') = }") print(f"{sample.get_scalar('rotation') = }") diff --git a/examples/pipelines/pipeline.py b/examples/pipelines/pipeline.py index 509bfddc..7ed63620 100644 --- a/examples/pipelines/pipeline.py +++ b/examples/pipelines/pipeline.py @@ -112,7 +112,7 @@ dataset_train = dataset_train.from_features_identifier(all_feature_id) print("dataset_train:", dataset_train) -print("scalar names =", dataset_train.get_scalar_names( )) +print("scalar names =", dataset_train.get_scalar_names()) print("field names =", dataset_train.get_field_names()) @@ -143,7 +143,7 @@ preprocessed_dataset = preprocessor.fit_transform(dataset_train) print("preprocessed_dataset:", preprocessed_dataset) -print("scalar names =", preprocessed_dataset.get_scalar_names( )) +print("scalar names =", preprocessed_dataset.get_scalar_names()) print("field names =", preprocessed_dataset.get_field_names())