Changes from all commits (21 commits)
66870ec  Added the Auditory Mouse Cortex dataset, loader and python notebook t… (marindigen, Nov 19, 2025)
71a86f0  Added functions collect_mat_files, mat_cell_to_dict, planewise_mat_ce… (marindigen, Nov 19, 2025)
31d8fb4  Added verify flag to the download_from_link_function, and included th… (marindigen, Nov 20, 2025)
495c253  Added test for download_file_from_link function (marindigen, Nov 23, 2025)
fcdb603  Added more documentation in the Notes of the download function (marindigen, Nov 24, 2025)
7b78289  Add triangle classification task to A123 dataset (marindigen, Nov 25, 2025)
e3955ad  Added tests for triangle common neighbour task (marindigen, Nov 25, 2025)
bf00900  Add GitHub Actions workflow for A123 dataset coverage testing (marindigen, Nov 25, 2025)
29eb264  Add CI coverage workflow and document pytest hooks (marindigen, Nov 25, 2025)
7b78ace  Fix CI workflow: use --no-build-isolation for PyG packages (marindigen, Nov 25, 2025)
128d98c  Fixing the dependency issue (marindigen, Nov 25, 2025)
ac50ef7  Fixed the tests, debugged loader and dataset. Have added downsampling… (marindigen, Nov 25, 2025)
2254ffa  Cleaned the tutorial and ran it for triangle_common_neighbors. Increa… (marindigen, Nov 25, 2025)
c920b7d  Fixed the test and removed the functions from conftest.py file. Moved… (marindigen, Nov 25, 2025)
2dc71ef  Debugging workflow (marindigen, Nov 25, 2025)
07de038  Debugging workflow (marindigen, Nov 25, 2025)
e33acea  Fixing workflow (marindigen, Nov 25, 2025)
d5e093b  Added argparser and documentation for it (marindigen, Nov 25, 2025)
f70fa7a  Removed the test_coverage_a123.yml workflow file (marindigen, Nov 25, 2025)
5e0a0f5  Kept the changes and the test class for download_file_from_link funct… (marindigen, Nov 26, 2025)
7b114c7  Converted the tutorial notebook into markdown (marindigen, Nov 27, 2025)
1 change: 1 addition & 0 deletions .gitignore
@@ -18,6 +18,7 @@ eggs/
 .vscode/
 lib/
 logs/
+lightning_logs/
 lib64/
 parts/
 sdist/
57 changes: 57 additions & 0 deletions configs/dataset/graph/a123.yaml
@@ -0,0 +1,57 @@
# Config file for loading Bowen et al. mouse auditory cortex calcium imaging dataset.

# This config describes how to download and process the original dataset introduced in:

# [Citation] Bowen et al. (2024), "Fractured columnar small-world functional network
# organization in volumes of L2/3 of mouse auditory cortex," PNAS Nexus, 3(2): pgae074.
# https://doi.org/10.1093/pnasnexus/pgae074

# We apply the preprocessing and graph-construction steps defined in this module to obtain
# a representation of neuronal activity suitable for our experiments.

# Please cite the original paper when using this dataset or any derivatives.

# Dataset loader config for A123 Cortex M
loader:
  _target_: topobench.data.loaders.A123DatasetLoader
  parameters:
    data_domain: graph
    data_type: A123CortexM
    data_dir: ${paths.data_dir}/${dataset.loader.parameters.data_domain}/${dataset.loader.parameters.data_type} # Use data_dir from dataset config
    data_name: a123_cortex_m # Use data_name from dataset config
    num_graphs: 10
    is_undirected: True
    num_channels: ${dataset.parameters.num_features} # Use num_features for node feature dim
    num_classes: ${dataset.parameters.num_classes} # Use num_classes for output dim
    task: ${dataset.parameters.task} # Use task type from dataset config

# Dataset-specific parameters
parameters:
  num_features: 3
  task: classification
  specific_task: classification # Current task selection (classification | triangle_classification | triangle_common_neighbors)
  num_classes: 9
  loss_type: cross_entropy
  monitor_metric: accuracy
  task_level: graph
  min_neurons: 3
  corr_threshold: 0.2

# Splits
split_params:
  learning_setting: inductive
  data_split_dir: ${paths.data_dir}/data_splits/${dataset.loader.parameters.data_name}
  data_seed: 0
  split_type: random # 'k-fold' or 'random' strategies
  k: 10 # for "k-fold" Cross-Validation
  train_prop: 0.7 # for "random" strategy splitting
  val_prop: 0.15 # for "random" strategy splitting
  test_prop: 0.15 # for "random" strategy splitting

# Dataloader parameters
dataloader_params:
  batch_size: 32
  num_workers: 0
  pin_memory: False
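For orientation, the following is a minimal sketch of how this config might be resolved and instantiated outside a full Hydra run. The `./data` directory for `paths.data_dir` and the `load()` call on the instantiated loader are assumptions for illustration, not values or APIs defined in this PR; in a normal TopoBench run the `${paths.*}` and `${dataset.*}` interpolations are resolved by the composed experiment config.

```python
# Minimal sketch (not part of the PR): resolve the interpolations in a123.yaml by
# hand and instantiate the loader it points at.
from hydra.utils import instantiate
from omegaconf import OmegaConf

dataset_cfg = OmegaConf.load("configs/dataset/graph/a123.yaml")

# Mock the two config roots the interpolations refer to: ${paths.*} and ${dataset.*}.
# "./data" is an assumed location, not a value defined in this config.
root = OmegaConf.create({"paths": {"data_dir": "./data"}, "dataset": dataset_cfg})

loader = instantiate(root.dataset.loader)  # builds A123DatasetLoader(parameters=...)
dataset = loader.load()  # assumption: the loader exposes load(), mirroring other TopoBench loaders
print(dataset)
```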

2 changes: 1 addition & 1 deletion env_setup.sh
@@ -4,7 +4,7 @@ pip install -e '.[all]'

 # Note that not all combinations of torch and CUDA are available
 # See https://github.com/pyg-team/pyg-lib to check the configuration that works for you
-TORCH="2.3.0" # available options: 2.0.0, 2.1.0, 2.2.0, 2.3.0, 2.4.0, ...
+TORCH="2.1.0" # available options: 2.0.0, 2.1.0, 2.2.0, 2.3.0, 2.4.0, ...
 CUDA="cpu" # if available, select the CUDA version suitable for your system
 # available options: cpu, cu102, cu113, cu116, cu117, cu118, cu121, ...
 pip install torch==${TORCH} --extra-index-url https://download.pytorch.org/whl/${CUDA}
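As an optional sanity check after running the updated setup script, a short Python snippet (illustrative only, not part of the repository) can confirm that the pinned torch build and the CUDA selection were picked up:

```python
# Optional sanity check after env_setup.sh: confirm the pinned torch build and
# CUDA selection. torch_geometric is only reported if the PyG extras installed.
import torch

print("torch:", torch.__version__)                    # expected to start with 2.1.0 after this change
print("CUDA available:", torch.cuda.is_available())   # False when CUDA="cpu"

try:
    import torch_geometric
    print("torch_geometric:", torch_geometric.__version__)
except ImportError:
    print("torch_geometric is not installed")
```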