Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion src/power_grid_model_ds/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,8 @@
from power_grid_model_ds._core.model.graphs.container import GraphContainer
from power_grid_model_ds._core.model.grids.base import Grid

__all__ = ["Grid", "GraphContainer", "PowerGridModelInterface"]
__all__ = [
"Grid",
"GraphContainer",
"PowerGridModelInterface",
]
22 changes: 21 additions & 1 deletion src/power_grid_model_ds/_core/model/grids/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
from power_grid_model_ds._core.model.grids._text_sources import TextSource
from power_grid_model_ds._core.model.grids.helpers import set_feeder_ids, set_is_feeder
from power_grid_model_ds._core.utils.pickle import get_pickle_path, load_from_pickle, save_to_pickle
from power_grid_model_ds._core.utils.serialization import _load_grid_from_json, _save_grid_to_json
from power_grid_model_ds._core.utils.zip import file2gzip

Self = TypeVar("Self", bound="Grid")
Expand Down Expand Up @@ -360,7 +361,10 @@ def get_downstream_nodes(self, node_id: int, inclusive: bool = False):
)

def cache(self, cache_dir: Path, cache_name: str, compress: bool = True):
"""Cache Grid to a folder
"""Cache Grid to a folder using pickle format.

Note: Consider using to_json() for better
interoperability and standardized format.

Args:
cache_dir (Path): The directory to save the cache to.
Expand Down Expand Up @@ -435,6 +439,22 @@ def from_txt_file(cls, txt_file_path: Path):
txt_lines = f.readlines()
return TextSource(grid_class=cls).load_from_txt(*txt_lines)

def to_json(self, path: Path, **kwargs) -> Path:
    """Serialize the grid to JSON format.

    Delegates to the serialization utility module; parent directories of
    ``path`` are created if they do not exist.

    Args:
        path: Destination file path to write JSON to.
        **kwargs: Additional keyword arguments forwarded to ``json.dump``
            (for example ``indent`` or ``sort_keys``).

    Returns:
        Path: The path where the file was saved.
    """
    return _save_grid_to_json(grid=self, path=path, **kwargs)

@classmethod
def from_json(cls: Type[Self], path: Path) -> Self:
    """Deserialize a grid from a JSON file previously written by ``to_json``.

    Args:
        path: Path of the JSON file to read.

    Returns:
        A new grid instance of this class populated from the file. Fields
        present in the file but unknown to this class are skipped, which
        allows cross-type loading.
    """
    return _load_grid_from_json(path=path, target_grid_class=cls)

def set_feeder_ids(self):
"""Sets feeder and substation id properties in the grids arrays"""
set_is_feeder(grid=self)
Expand Down
114 changes: 114 additions & 0 deletions src/power_grid_model_ds/_core/utils/serialization.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
# SPDX-FileCopyrightText: Contributors to the Power Grid Model project <[email protected]>
#
# SPDX-License-Identifier: MPL-2.0

"""Serialization utilities for Grid objects using power-grid-model serialization with extensions support."""

import dataclasses
import json
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Dict, Type, TypeVar

from power_grid_model_ds._core.model.arrays.base.array import FancyArray

if TYPE_CHECKING:
# Import only for type checking to avoid circular imports at runtime
from power_grid_model_ds._core.model.grids.base import Grid

G = TypeVar("G", bound=Grid)
else:
# Runtime: don't import Grid to avoid circular import; keep unbound TypeVar
G = TypeVar("G")

logger = logging.getLogger(__name__)


def _restore_grid_values(grid, json_data: Dict) -> None:
    """Restore scalar fields and arrays onto *grid* from deserialized JSON data.

    Args:
        grid: The Grid instance to populate (modified in place).
        json_data: Mapping of field name to serialized value. Array fields
            are stored as ``{"data": {column_name: [values, ...]}}``; scalar
            fields are stored directly.

    Attributes unknown to the target grid class are skipped (cross-type
    loading); failure to restore a single field is logged as a warning
    instead of aborting the whole load.
    """
    for attr_name, attr_values in json_data.items():
        if not hasattr(grid, attr_name):
            # Field exists in the file but not on this grid class.
            continue

        current_value = getattr(grid, attr_name)
        if not isinstance(current_value, FancyArray):
            # Scalar field: cast through the declared dataclass field type.
            expected_type = grid.__dataclass_fields__[attr_name].type
            setattr(grid, attr_name, expected_type(attr_values))
            continue

        try:
            array_field = grid.find_array_field(current_value.__class__)
            # Keep only the columns the target array type declares, so data
            # from an extended array can be loaded into a narrower type.
            matched_columns = {
                col: attr_values["data"][col] for col in array_field.type().columns if col in attr_values["data"]
            }
            setattr(grid, attr_name, array_field.type(**matched_columns))
        except (AttributeError, KeyError, ValueError, TypeError) as e:
            # Handle restoration failures:
            # - KeyError: missing "data" key
            # - ValueError/TypeError: invalid data conversion
            # - AttributeError: grid methods/attributes missing
            logger.warning("Failed to restore '%s': %s", attr_name, e)


def _save_grid_to_json(
grid,
path: Path,
**kwargs,
) -> Path:
"""Save a Grid object to JSON format using power-grid-model serialization with extensions support.

Args:
grid: The Grid object to serialize
path: The file path to save to
**kwargs: Keyword arguments forwarded to json.dump (for example, indent, sort_keys,
ensure_ascii, etc.).
Returns:
Path: The path where the file was saved
"""
path.parent.mkdir(parents=True, exist_ok=True)

serialized_data = {}
for field in dataclasses.fields(grid):
if field.name in ["graphs", "_id_counter"]:
continue

field_value = getattr(grid, field.name)
if isinstance(field_value, (int, float, str, bool)):
serialized_data[field.name] = field_value
continue

if not isinstance(field_value, FancyArray):
raise NotImplementedError(f"Serialization for field of type '{type(field_value)}' is not implemented.")

if field_value.size == 0:
continue

serialized_data[field.name] = {
"data": {name: field_value[name].tolist() for name in field_value.dtype.names},
}

# Write to file
with open(path, "w", encoding="utf-8") as f:
json.dump(serialized_data, f, **kwargs)

return path


def _load_grid_from_json(path: Path, target_grid_class: Type[G]) -> G:
    """Load a Grid object from JSON format with cross-type loading support.

    Args:
        path: The file path to load from
        target_grid_class: Grid class to load into.

    Returns:
        Grid: The deserialized Grid object of the specified target class
    """
    raw_data = json.loads(path.read_text(encoding="utf-8"))

    # Start from an empty grid of the requested class and fill it field by field.
    grid = target_grid_class.empty()
    _restore_grid_values(grid, raw_data)
    return grid
206 changes: 206 additions & 0 deletions tests/unit/utils/test_serialization.py
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am missing tests for the failure/crash cases.

Original file line number Diff line number Diff line change
@@ -0,0 +1,206 @@
# SPDX-FileCopyrightText: Contributors to the Power Grid Model project <[email protected]>
#
# SPDX-License-Identifier: MPL-2.0

"""Comprehensive unit tests for Grid serialization with power-grid-model compatibility."""

from dataclasses import dataclass
from pathlib import Path

import numpy as np
import pytest
from numpy.typing import NDArray

from power_grid_model_ds import Grid
from power_grid_model_ds._core.model.arrays.base.array import FancyArray
from power_grid_model_ds._core.utils.serialization import (
_load_grid_from_json,
_save_grid_to_json,
)
from power_grid_model_ds.arrays import LineArray
from power_grid_model_ds.arrays import NodeArray as BaseNodeArray
from power_grid_model_ds.fancypy import array_equal


class ExtendedNodeArray(BaseNodeArray):
    """Node array extended with custom columns, used to test extension serialization."""

    # Defaults ensure the extra columns are filled when omitted at construction.
    _defaults = {"u": 0.0, "analysis_flag": 0}
    u: NDArray[np.float64]  # extra float column exercised by roundtrip tests
    analysis_flag: NDArray[np.int32]  # extra int marker column


class ExtendedLineArray(LineArray):
    """Line array extended with custom columns, used to test extension serialization."""

    # Defaults ensure the extra columns are filled when omitted at construction.
    _defaults = {"i_from": 0.0, "loading_factor": 0.0}
    i_from: NDArray[np.float64]  # extra float column exercised by roundtrip tests
    loading_factor: NDArray[np.float64]  # extra float column


@dataclass
class ExtendedGrid(Grid):
    """Grid subclass with extended array types and extra scalar fields for tests."""

    node: ExtendedNodeArray
    line: ExtendedLineArray

    # Scalar extensions: exercise the scalar-field (de)serialization path.
    value_extension: float = 0.0
    str_extension: str = "default"


@pytest.fixture
def basic_grid():
    """Minimal grid built from a small text definition (three nodes, two lines)."""
    return Grid.from_txt("1 2", "2 3", "S10 1")


@pytest.fixture
def extended_grid():
    """Extended grid fixture populated with values in the custom columns."""
    grid = ExtendedGrid.empty()
    # Nodes carry the extra `u` and `analysis_flag` columns on top of the base schema.
    nodes = ExtendedNodeArray(
        id=[1, 2, 3], u_rated=[10500, 10500, 10500], u=[10450, 10400, 10350], analysis_flag=[1, 0, 1]
    )
    # Lines carry the extra `i_from` and `loading_factor` columns.
    lines = ExtendedLineArray(
        id=[10, 11],
        from_node=[1, 2],
        to_node=[2, 3],
        from_status=[1, 1],
        to_status=[1, 1],
        r1=[0.1, 0.15],
        x1=[0.2, 0.25],
        c1=[1e-6, 1.2e-6],
        tan1=[0.0, 0.0],
        i_n=[400, 350],
        i_from=[150.5, 120.3],
        loading_factor=[0.75, 0.68],
    )
    grid.append(nodes)
    grid.append(lines)
    return grid


class TestSerializationRoundtrips:
    """Test serialization across different formats and configurations"""

    def test_basic_serialization_roundtrip(self, basic_grid: Grid, tmp_path: Path):
        """Round-trip a basic grid through JSON and verify the arrays survive."""
        path = tmp_path / "test.json"
        result_path = _save_grid_to_json(basic_grid, path)
        assert result_path.exists()

        # Load and verify. NOTE: the original calls discarded array_equal's
        # return value, so they verified nothing; assert the result.
        loaded_grid = _load_grid_from_json(path, target_grid_class=Grid)
        assert array_equal(loaded_grid.node, basic_grid.node)
        assert array_equal(loaded_grid.line, basic_grid.line)
        assert list(loaded_grid.node.id) == list(basic_grid.node.id)

    def test_extended_serialization_roundtrip(self, extended_grid: ExtendedGrid, tmp_path: Path):
        """Test extended serialization preserving custom data"""
        path = tmp_path / "extended.json"

        _save_grid_to_json(extended_grid, path)
        loaded_grid = _load_grid_from_json(path, target_grid_class=ExtendedGrid)

        # Verify core data and the scalar extension fields
        assert loaded_grid.node.size == extended_grid.node.size
        assert loaded_grid.line.size == extended_grid.line.size
        assert loaded_grid.value_extension == extended_grid.value_extension
        assert loaded_grid.str_extension == extended_grid.str_extension

        # Verify the extended columns survived the roundtrip
        np.testing.assert_array_equal(loaded_grid.node.u, extended_grid.node.u)
        np.testing.assert_array_equal(loaded_grid.line.i_from, extended_grid.line.i_from)

    def test_empty_grid_handling(self, tmp_path: Path):
        """Test serialization of empty grids"""
        empty_grid = Grid.empty()

        json_path = tmp_path / "empty.json"

        # Saving an empty grid must not raise (empty arrays are skipped on save).
        _save_grid_to_json(empty_grid, json_path)

        # Should load back as empty
        loaded_json = _load_grid_from_json(json_path, target_grid_class=Grid)
        assert loaded_json.node.size == 0


class TestCrossTypeCompatibility:
    """Test cross-type loading and compatibility"""

    def test_basic_to_extended_loading(self, basic_grid: Grid, tmp_path: Path):
        """Test loading basic grid into extended type"""
        path = tmp_path / "basic.json"

        # Save basic grid, then load it into the extended grid class.
        _save_grid_to_json(basic_grid, path)
        loaded_grid = _load_grid_from_json(path, target_grid_class=ExtendedGrid)

        # NOTE: the original called array_equal without asserting its result,
        # so it verified nothing. The two sides use different array types, so
        # compare the shared core columns explicitly instead.
        np.testing.assert_array_equal(loaded_grid.node.id, basic_grid.node.id)
        np.testing.assert_array_equal(loaded_grid.node.u_rated, basic_grid.node.u_rated)
        np.testing.assert_array_equal(loaded_grid.line.id, basic_grid.line.id)
        np.testing.assert_array_equal(loaded_grid.line.from_node, basic_grid.line.from_node)
        np.testing.assert_array_equal(loaded_grid.line.to_node, basic_grid.line.to_node)

    def test_extended_to_basic_loading(self, extended_grid: ExtendedGrid, tmp_path: Path):
        """Test loading extended grid into basic type"""
        path = tmp_path / "extended.json"

        # Save extended grid, then load it into the basic grid class; the
        # extension columns should be dropped and core columns preserved.
        _save_grid_to_json(extended_grid, path)
        loaded_grid = _load_grid_from_json(path, target_grid_class=Grid)

        np.testing.assert_array_equal(loaded_grid.node.id, extended_grid.node.id)
        np.testing.assert_array_equal(loaded_grid.node.u_rated, extended_grid.node.u_rated)
        np.testing.assert_array_equal(loaded_grid.line.id, extended_grid.line.id)
        np.testing.assert_array_equal(loaded_grid.line.from_node, extended_grid.line.from_node)
        np.testing.assert_array_equal(loaded_grid.line.to_node, extended_grid.line.to_node)


class TestExtensionHandling:
    """Test extension data handling and edge cases"""

    def test_custom_array_serialization_roundtrip(self, tmp_path: Path):
        """Test serialization and loading of grids with custom arrays"""

        # Create a custom array type that properly extends FancyArray
        class CustomMetadataArray(FancyArray):
            """Custom metadata array for testing"""

            # id has no default on purpose: it must always be provided.
            _defaults = {"metadata_value": 0.0, "category": 0}

            id: NDArray[np.int32]
            metadata_value: NDArray[np.float64]
            category: NDArray[np.int32]

        # Create a grid class carrying the custom array as an extra field
        @dataclass
        class GridWithCustomArray(Grid):
            custom_metadata: CustomMetadataArray

        # Create test grid with custom data
        grid = GridWithCustomArray.empty()

        # Add some basic grid data
        nodes = grid.node.__class__(id=[1, 2], u_rated=[10000, 10000])
        grid.append(nodes)

        # Add custom metadata. NOTE(review): assigned directly rather than via
        # grid.append — presumably append only handles the standard arrays; confirm.
        custom_data = CustomMetadataArray(id=[100, 200, 300], metadata_value=[1.5, 2.5, 3.5], category=[1, 2, 1])
        grid.custom_metadata = custom_data

        # Test JSON serialization
        json_path = tmp_path / "custom_array.json"
        _save_grid_to_json(grid, json_path)

        # Load back and verify
        loaded_grid = _load_grid_from_json(json_path, target_grid_class=GridWithCustomArray)

        # Verify core data
        assert loaded_grid.node.size == 2
        np.testing.assert_array_equal(loaded_grid.node.id, [1, 2])

        # Verify custom array was preserved
        assert hasattr(loaded_grid, "custom_metadata")
        assert loaded_grid.custom_metadata.size == 3
        np.testing.assert_array_equal(loaded_grid.custom_metadata.id, [100, 200, 300])
        np.testing.assert_array_almost_equal(loaded_grid.custom_metadata.metadata_value, [1.5, 2.5, 3.5])
        np.testing.assert_array_equal(loaded_grid.custom_metadata.category, [1, 2, 1])
3 changes: 2 additions & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.