
Commit a549e22

Fabien Casenave and Xavier Roynard authored
🎉 feat(sample.py) add option to overwrite folder in save (#130)
* feat(sample.py) add option to overwrite folder in save
* feat(sample.py) add option to overwrite folder in save
* (path) replace all uses of os by pathlib
* (examples) replace all uses of os by pathlib
* (docs) replace all uses of os by pathlib

---------

Co-authored-by: Fabien Casenave <[email protected]>
Co-authored-by: Xavier Roynard <[email protected]>
1 parent f8be860 commit a549e22

19 files changed: 290 additions, 296 deletions
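The change is mechanical across all touched files: each os / os.path construct is swapped for its pathlib equivalent. A minimal sketch of the mapping applied throughout (illustrative code, not taken from the commit):

from pathlib import Path

# os.path.join(a, "b")   ->  Path(a) / "b"
# os.getcwd()            ->  Path.cwd()
# os.path.dirname(p)     ->  Path(p).parent
# os.path.abspath(p)     ->  Path(p).absolute()
# os.makedirs(p)         ->  Path(p).mkdir(parents=True, exist_ok=True)

base = Path("/tmp") / "pathlib_demo"         # hypothetical folder, not from the commit
base.mkdir(parents=True, exist_ok=True)      # creates parents and tolerates an existing folder
print(base.parent, base.absolute())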

docs/conf.py

Lines changed: 6 additions & 11 deletions
@@ -14,23 +14,18 @@
 # -- Path setup --------------------------------------------------------------
 
 import datetime
-import os
 import sys
 import subprocess
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#
+from pathlib import Path
 
 sys.path.insert(0, ".")
 sys.path.insert(0, "../")
 
-basedir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+basedir = Path(__file__).absolute().parent.parent
 sys.path.insert(0, basedir)
-sys.path.insert(0, os.path.join(basedir, "src/plaid"))
-# sys.path.insert(0, os.path.join(basedir, "tests"))
-sys.path.insert(0, os.path.join(basedir, "examples"))
+sys.path.insert(0, basedir / "src" / "plaid")
+# sys.path.insert(0, basedir / "tests")
+sys.path.insert(0, basedir / "examples")
 print(sys.path)
 
 
@@ -278,4 +273,4 @@ def get_git_info():
     return "unknown"
 
 release = get_git_info()
-version = release
+version = release
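A side note on the new conf.py: sys.path is documented as a list of strings, and while CPython generally tolerates Path entries, some tooling does not. A more defensive variant of the same lines, assuming nothing beyond what the diff shows, would wrap each entry in str():

import sys
from pathlib import Path

basedir = Path(__file__).absolute().parent.parent
# str() keeps sys.path entries as plain strings, which is what the import machinery expects
sys.path.insert(0, str(basedir))
sys.path.insert(0, str(basedir / "src" / "plaid"))
sys.path.insert(0, str(basedir / "examples"))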

docs/source/notebooks/bisect.ipynb

Lines changed: 21 additions & 19 deletions
@@ -17,7 +17,8 @@
 "outputs": [],
 "source": [
 "# Importing Required Libraries\n",
-"import os\n",
+"from typing import Union\n",
+"from pathlib import Path\n",
 "\n",
 "from plaid.containers.dataset import Dataset\n",
 "from plaid.post.bisect import plot_bisect, prepare_datasets\n",
@@ -30,24 +31,25 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"def get_project_root(path: str, index=3) -> str:\n",
+"def get_project_root(path: Union[str, Path], index: int=3) -> Path:\n",
 "    \"\"\"Find the project root path\n",
 "\n",
 "    Args:\n",
-"        path (str): Current path of the notebook\n",
+"        path (Union[str, Path]): Current path of the notebook\n",
 "        index (int, optional): The number of parents to go back. Defaults to 3.\n",
 "\n",
 "    Returns:\n",
-"        str: The project root path\n",
+"        Path: The project root path\n",
 "    \"\"\"\n",
+"    path = Path(path)\n",
 "    if index == 0:\n",
 "        return path\n",
-"    return get_project_root(os.path.dirname(path), index - 1)\n",
+"    return get_project_root(path.parent, index - 1)\n",
 "\n",
 "\n",
 "# Setting up Directories\n",
-"current_directory = os.getcwd()\n",
-"dataset_directory = os.path.join(get_project_root(current_directory), \"tests\", \"post\")"
+"current_directory = Path.cwd()\n",
+"dataset_directory = get_project_root(current_directory) / \"tests\" / \"post\""
 ]
 },
 {
@@ -66,9 +68,9 @@
 "outputs": [],
 "source": [
 "# Load PLAID datasets and problem metadata objects\n",
-"ref_ds = Dataset(os.path.join(dataset_directory, \"dataset_ref\"))\n",
-"pred_ds = Dataset(os.path.join(dataset_directory, \"dataset_near_pred\"))\n",
-"problem = ProblemDefinition(os.path.join(dataset_directory, \"problem_definition\"))\n",
+"ref_ds = Dataset(dataset_directory / \"dataset_ref\")\n",
+"pred_ds = Dataset(dataset_directory / \"dataset_near_pred\")\n",
+"problem = ProblemDefinition(dataset_directory / \"problem_definition\")\n",
 "\n",
 "# Get output scalars from reference and prediction dataset\n",
 "ref_out_scalars, pred_out_scalars, out_scalars_names = prepare_datasets(\n",
@@ -117,9 +119,9 @@
 "print(\"=== Plot with file paths ===\")\n",
 "\n",
 "# Load PLAID datasets and problem metadata from files\n",
-"ref_path = os.path.join(dataset_directory, \"dataset_ref\")\n",
-"pred_path = os.path.join(dataset_directory, \"dataset_pred\")\n",
-"problem_path = os.path.join(dataset_directory, \"problem_definition\")\n",
+"ref_path = dataset_directory / \"dataset_ref\"\n",
+"pred_path = dataset_directory / \"dataset_pred\"\n",
+"problem_path = dataset_directory / \"problem_definition\"\n",
 "\n",
 "# Using file paths to generate bisect plot on scalar_2\n",
 "plot_bisect(ref_path, pred_path, problem_path, \"scalar_2\", \"differ_bisect_plot\")"
@@ -143,9 +145,9 @@
 "print(\"=== Plot with PLAID objects ===\")\n",
 "\n",
 "# Load PLAID datasets and problem metadata objects\n",
-"ref_path = Dataset(os.path.join(dataset_directory, \"dataset_ref\"))\n",
-"pred_path = Dataset(os.path.join(dataset_directory, \"dataset_ref\"))\n",
-"problem_path = ProblemDefinition(os.path.join(dataset_directory, \"problem_definition\"))\n",
+"ref_path = Dataset(dataset_directory / \"dataset_ref\")\n",
+"pred_path = Dataset(dataset_directory / \"dataset_pred\")\n",
+"problem_path = ProblemDefinition(dataset_directory / \"problem_definition\")\n",
 "\n",
 "# Using PLAID objects to generate bisect plot on scalar_2\n",
 "plot_bisect(ref_path, pred_path, problem_path, \"scalar_2\", \"equal_bisect_plot\")"
@@ -169,9 +171,9 @@
 "print(\"=== Mix with scalar index and verbose ===\")\n",
 "\n",
 "# Mix\n",
-"ref_path = os.path.join(dataset_directory, \"dataset_ref\")\n",
-"pred_path = os.path.join(dataset_directory, \"dataset_near_pred\")\n",
-"problem_path = ProblemDefinition(os.path.join(dataset_directory, \"problem_definition\"))\n",
+"ref_path = dataset_directory / \"dataset_ref\"\n",
+"pred_path = dataset_directory / \"dataset_near_pred\"\n",
+"problem_path = ProblemDefinition(dataset_directory / \"problem_definition\")\n",
 "\n",
 "# Using scalar index and verbose option to generate bisect plot\n",
 "scalar_index = 0\n",

docs/source/notebooks/metrics.ipynb

Lines changed: 18 additions & 16 deletions
@@ -17,7 +17,8 @@
 "outputs": [],
 "source": [
 "# Importing Required Libraries\n",
-"import os\n",
+"from typing import Union\n",
+"from pathlib import Path\n",
 "\n",
 "from plaid.containers.dataset import Dataset\n",
 "from plaid.post.metrics import compute_metrics, prepare_datasets, pretty_metrics\n",
@@ -30,24 +31,25 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"def get_project_root(path: str, index=3) -> str:\n",
+"def get_project_root(path: Union[str, Path], index=3) -> Path:\n",
 "    \"\"\"Find the project root path\n",
 "\n",
 "    Args:\n",
-"        path (str): Current path of the notebook\n",
+"        path (Union[str, Path]): Current path of the notebook\n",
 "        index (int, optional): The number of parents to go back. Defaults to 3.\n",
 "\n",
 "    Returns:\n",
-"        str: The project root path\n",
+"        Path: The project root path\n",
 "    \"\"\"\n",
+"    path = Path(path)\n",
 "    if index == 0:\n",
 "        return path\n",
-"    return get_project_root(os.path.dirname(path), index - 1)\n",
+"    return get_project_root(path.parent, index - 1)\n",
 "\n",
 "\n",
 "# Setting up Directories\n",
-"current_directory = os.getcwd()\n",
-"dataset_directory = os.path.join(get_project_root(current_directory), \"tests\", \"post\")"
+"current_directory = Path.cwd()\n",
"dataset_directory = current_directory / \"tests\" / \"post\""
 ]
 },
 {
@@ -66,9 +68,9 @@
 "outputs": [],
 "source": [
 "# Load PLAID datasets and problem metadata objects\n",
-"ref_ds = Dataset(os.path.join(dataset_directory, \"dataset_ref\"))\n",
-"pred_ds = Dataset(os.path.join(dataset_directory, \"dataset_near_pred\"))\n",
-"problem = ProblemDefinition(os.path.join(dataset_directory, \"problem_definition\"))\n",
+"ref_ds = Dataset(dataset_directory / \"dataset_ref\")\n",
+"pred_ds = Dataset(dataset_directory / \"dataset_near_pred\")\n",
+"problem = ProblemDefinition(dataset_directory / \"problem_definition\")\n",
 "\n",
 "# Get output scalars from reference and prediction dataset\n",
 "ref_out_scalars, pred_out_scalars, out_scalars_names = prepare_datasets(\n",
@@ -117,9 +119,9 @@
 "print(\"=== Metrics with file paths ===\")\n",
 "\n",
 "# Load PLAID datasets and problem metadata file paths\n",
-"ref_ds = os.path.join(dataset_directory, \"dataset_ref\")\n",
-"pred_ds = os.path.join(dataset_directory, \"dataset_near_pred\")\n",
-"problem = os.path.join(dataset_directory, \"problem_definition\")\n",
+"ref_ds = dataset_directory / \"dataset_ref\"\n",
+"pred_ds = dataset_directory / \"dataset_near_pred\"\n",
+"problem = dataset_directory / \"problem_definition\"\n",
 "\n",
 "# Using file paths to generate metrics\n",
 "metrics = compute_metrics(ref_ds, pred_ds, problem, \"first_metrics\")\n",
@@ -148,9 +150,9 @@
 "print(\"=== Metrics with PLAID objects and verbose ===\")\n",
 "\n",
 "# Load PLAID datasets and problem metadata objects\n",
-"ref_ds = Dataset(os.path.join(dataset_directory, \"dataset_ref\"))\n",
-"pred_ds = Dataset(os.path.join(dataset_directory, \"dataset_pred\"))\n",
-"problem = ProblemDefinition(os.path.join(dataset_directory, \"problem_definition\"))\n",
+"ref_ds = Dataset(dataset_directory / \"dataset_ref\")\n",
+"pred_ds = Dataset(dataset_directory / \"dataset_pred\")\n",
+"problem = ProblemDefinition(dataset_directory / \"problem_definition\")\n",
 "\n",
 "# Pretty print activated with verbose mode\n",
 "metrics = compute_metrics(ref_ds, pred_ds, problem, \"second_metrics\", verbose=True)"

docs/source/notebooks/problem_definition.ipynb

Lines changed: 4 additions & 4 deletions
@@ -24,7 +24,7 @@
 "outputs": [],
 "source": [
 "# Import required libraries\n",
-"import os\n",
+"from pathlib import Path\n",
 "\n",
 "import numpy as np"
 ]
@@ -229,9 +229,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"test_pth = f\"/tmp/test_safe_to_delete_{np.random.randint(1e10, 1e12)}\"\n",
-"pb_def_save_fname = os.path.join(test_pth, \"test\")\n",
-"os.makedirs(test_pth)\n",
+"test_pth = Path(f\"/tmp/test_safe_to_delete_{np.random.randint(1e10, 1e12)}\")\n",
+"pb_def_save_fname = test_pth / \"test\"\n",
+"test_pth.mkdir(parents=True, exist_ok=True)\n",
 "print(f\"saving path: {pb_def_save_fname}\")\n",
 "\n",
 "problem._save_to_dir_(pb_def_save_fname)"

docs/source/notebooks/sample.ipynb

Lines changed: 7 additions & 9 deletions
@@ -25,7 +25,7 @@
 "outputs": [],
 "source": [
 "# Import required libraries\n",
-"import os\n",
+"from pathlib import Path\n",
 "\n",
 "import numpy as np"
 ]
@@ -255,9 +255,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"path_linked_sample = os.path.join(\n",
-"    os.getcwd(), \"dataset/samples/sample_000000000/meshes/mesh_000000000.cgns\"\n",
-")\n",
+"path_linked_sample = Path.cwd() / \"dataset/samples/sample_000000000/meshes/mesh_000000000.cgns\"\n",
 "new_sample_mult_mesh.link_tree(\n",
 "    path_linked_sample, linked_sample=sample, linked_time=0.0, time=1.5\n",
 ")\n",
@@ -929,10 +927,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"test_pth = f\"/tmp/test_safe_to_delete_{np.random.randint(1e10, 1e12)}\"\n",
-"os.makedirs(test_pth)\n",
+"test_pth = Path(f\"/tmp/test_safe_to_delete_{np.random.randint(1e10, 1e12)}\")\n",
+"test_pth.mkdir(parents=True, exist_ok=True)\n",
 "\n",
-"sample_save_fname = os.path.join(test_pth, \"test\")\n",
+"sample_save_fname = test_pth / \"test\"\n",
 "print(f\"saving path: {sample_save_fname}\")\n",
 "\n",
 "sample.save(sample_save_fname)"
@@ -969,9 +967,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"new_sample_2 = Sample.load_from_dir(os.path.join(test_pth, \"test\"))\n",
+"new_sample_2 = Sample.load_from_dir(test_pth / \"test\")\n",
 "\n",
-"show_sample(new_sample)"
+"show_sample(new_sample_2)"
 ]
 },
 {
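The notebook builds its throw-away folder by hand under /tmp with a random suffix. An alternative, not used by the commit, is the standard-library tempfile module, which creates a unique directory atomically:

import tempfile
from pathlib import Path

# mkdtemp creates the directory and returns its path, so no explicit mkdir is needed
test_pth = Path(tempfile.mkdtemp(prefix="test_safe_to_delete_"))
sample_save_fname = test_pth / "test"
print(f"saving path: {sample_save_fname}")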

examples/containers/dataset_example.py

Lines changed: 3 additions & 2 deletions
@@ -15,6 +15,7 @@
 # %%
 # Import required libraries
 import os
+from pathlib import Path
 
 # %%
 # Import necessary libraries and functions
@@ -388,8 +389,8 @@ def dprint(name: str, dictio: dict, end: str = "\n"):
 # ### Save the dataset to a TAR (Tape Archive) file
 
 # %%
-tmpdir = f"/tmp/test_safe_to_delete_{np.random.randint(low=1, high=2_000_000_000)}"
-tmpfile = os.path.join(tmpdir, "test_file.plaid")
+tmpdir = Path(f"/tmp/test_safe_to_delete_{np.random.randint(low=1, high=2_000_000_000)}")
+tmpfile = tmpdir / "test_file.plaid"
 
 print(f"Save dataset in: {tmpfile}")
 dataset.save(tmpfile)
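Note that this example does not create tmpdir before calling dataset.save; whether Dataset.save creates missing parent folders itself cannot be verified from this excerpt. If it does not, the pathlib way to prepare the folder first would be:

from pathlib import Path

tmpdir = Path("/tmp/dataset_save_demo")     # hypothetical folder; the example uses a random suffix
tmpdir.mkdir(parents=True, exist_ok=True)   # make sure the parent of the .plaid file exists
tmpfile = tmpdir / "test_file.plaid"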

examples/containers/sample_example.py

Lines changed: 5 additions & 5 deletions
@@ -14,7 +14,7 @@
 
 # %%
 # Import required libraries
-import os
+from pathlib import Path
 
 # %%
 # Import necessary libraries and functions
@@ -517,10 +517,10 @@ def show_sample(sample: Sample):
 # ### Save Sample to as a file tree
 
 # %%
-test_pth = f"/tmp/test_safe_to_delete_{np.random.randint(low=1, high=2_000_000_000)}"
-os.makedirs(test_pth)
+test_pth = Path(f"/tmp/test_safe_to_delete_{np.random.randint(low=1, high=2_000_000_000)}")
+test_pth.mkdir(parents=True, exist_ok=True)
 
-sample_save_fname = os.path.join(test_pth, "test")
+sample_save_fname = test_pth / "test"
 print(f"saving path: {sample_save_fname}")
 
 sample.save(sample_save_fname)
@@ -537,7 +537,7 @@ def show_sample(sample: Sample):
 # ### Load a Sample from a directory via the Sample class
 
 # %%
-new_sample_2 = Sample.load_from_dir(os.path.join(test_pth, "test"))
+new_sample_2 = Sample.load_from_dir(test_pth / "test")
 
 show_sample(new_sample)
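Read end to end, the example now performs the same pathlib-based save/load round trip as the sample notebook. A condensed sketch of that flow, assuming a Sample instance named sample and an import path chosen by analogy with plaid.containers.dataset (both are assumptions, not shown in this excerpt):

from pathlib import Path
from plaid.containers.sample import Sample   # import path assumed

test_pth = Path("/tmp/sample_roundtrip_demo")            # hypothetical folder name
test_pth.mkdir(parents=True, exist_ok=True)

sample_save_fname = test_pth / "test"
sample.save(sample_save_fname)                           # 'sample' is assumed to exist, as in the example
new_sample_2 = Sample.load_from_dir(sample_save_fname)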
