2 changes: 2 additions & 0 deletions Project.toml
@@ -5,6 +5,7 @@ version = "0.3.0"

[deps]
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
DataStructures = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6"
JSON3 = "0f8b85d8-7281-11e9-16c2-39a750bddbf1"
@@ -18,6 +19,7 @@ StructTypes = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"

[compat]
Conda = "^1.5.0"
DataFrames = "1.7.0"
DataStructures = "^0.18.11"
Graphs = "^1.4.1"
JSON3 = "^1.0.1"
4 changes: 3 additions & 1 deletion src/SimpleHypergraphs.jl
@@ -26,7 +26,7 @@
export dual
export random_model, random_kuniform_model, random_dregular_model, random_preferential_model

export Abstract_HG_format, HGF_Format, JSON_Format
export Abstract_HG_format, HGF_Format, JSON_Format, HIF_Format
export hg_load, hg_save

export modularity
@@ -62,7 +62,7 @@
catch e; end
has_plotting[] = has_networkx && has_hypernetx
if !has_plotting[]
@warn "The plotting functionality of HyperNetX will not work!\n"*
(has_networkx ? "" : "Conda Python networkx not found.\n")*
(has_hypernetx ? "" : "Conda Python HyperNetX not found.\n")*
"To test your installation try running `using PyCall;pyimport(\"networkx\");pyimport(\"hypernetx\")`"
@@ -78,6 +78,8 @@
include("hypergraph.jl")
include("io.jl")

include("io_hif.jl")

include("models/bipartite.jl")
include("models/twosection.jl")
include("models/random-models.jl")
228 changes: 228 additions & 0 deletions src/io_hif.jl
@@ -0,0 +1,228 @@
using JSON3
using DataFrames


"""
    HIF_Format

Indicates the Hypergraph Interchange Format (HIF) for `hg_load`/`hg_save`.
"""
struct HIF_Format <: Abstract_HG_format end


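"""
    hg_load(io::IO, format::HIF_Format; T=Bool, D=Dict{Int,T}, sort_by_id=false, show_warning=true)

Loads a hypergraph from an input stream `io` in `HIF` format.

* `T` : type of the hyperedge weights (defaults to `Bool`)
* `D` : dictionary type used for storing the weights (defaults to `Dict{Int, T}`)
* `sort_by_id` : when `true`, nodes and edges are sorted by identifier before the hypergraph is built
* `show_warning` : when `true`, warns if the order of nodes or edges from the source file was changed
"""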
function hg_load(
io::IO,
format::HIF_Format;
T::Type{U} = Bool,
D::Type{<:AbstractDict{Int, U}} = Dict{Int, T},
sort_by_id::Bool=false,
show_warning::Bool=true,
) where {U<:Real}
data = JSON3.read(read(io, String), Dict{String, Any})

haskey(data, "incidences") || throw(ArgumentError("Missing required attribute 'incidences'"))

if isempty(data["incidences"])
if isempty(get(data, "edges", [])) && isempty(get(data, "nodes", []))
return Hypergraph{
T,
                Union{String, Int, Dict{String, Any}},
                Union{String, Int, Dict{String, Any}},
D,
}(0, 0)
        elseif isempty(get(data, "edges", [])) || isempty(get(data, "nodes", []))
throw(ArgumentError("When incidences are empty, both 'nodes' and 'edges' must contain data"))
end
end

edges = build_edges_dataframe(data)
nodes = build_nodes_dataframe(data)

add_nodes_and_edges_from_incidences!(data, edges, nodes)

if sort_by_id
        sort!(edges, :edge)
        sort!(nodes, :node)
end

if show_warning
if edges.edge != 1:nrow(edges)
@warn "Edges in the source file were not sorted - their order was changed."
end

if nodes.node != 1:nrow(nodes)
@warn "Nodes in the source file were not sorted - their order was changed"
end
end

    v_meta = Vector{Union{String, Int, Dict{String, Any}}}()
    he_meta = Vector{Union{String, Int, Dict{String, Any}}}()

    # fall back to the identifier when a node carries no attributes
    for row in eachrow(nodes)
        push!(v_meta, isnothing(row.attrs) ? row.node : row.attrs)
    end

    # same fallback for hyperedges
    for row in eachrow(edges)
        push!(he_meta, isnothing(row.attrs) ? row.edge : row.attrs)
    end

hg = Hypergraph{
T,
        Union{String, Int, Dict{String, Any}},
        Union{String, Int, Dict{String, Any}},
D,
}(nrow(nodes), nrow(edges), v_meta, he_meta)

add_weights_from_incidences!(data, hg, edges, nodes)

hg
end


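"""
    hg_load(fname::String, format::HIF_Format; T=Bool, D=Dict{Int,T}, sort_by_id=false, show_warning=true)

Loads a hypergraph from a file `fname` in `HIF` format; keyword arguments are
forwarded to `hg_load(io::IO, ::HIF_Format)`.
"""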
function hg_load(
fname::String,
format::HIF_Format;
T::Type{U} = Bool,
D::Type{<:AbstractDict{Int, U}} = Dict{Int, T},
sort_by_id::Bool=false,
show_warning::Bool=true,
) where {U<:Real}
open(io -> hg_load(io, format, T=T, D=D, sort_by_id=sort_by_id, show_warning=show_warning), fname, "r")
end



# Writes incidence weights into the hypergraph `hg`, mapping HIF identifiers
# to the row indices of the previously built node/edge DataFrames.
function add_weights_from_incidences!(
data::Dict{String, Any},
hg::Hypergraph,
edges::DataFrame,
nodes::DataFrame,
)
    # map HIF identifiers to 1-based matrix indices
    edge_dict = Dict{Union{String, Int}, Int}(row.edge => idx for (idx, row) in enumerate(eachrow(edges)))
    node_dict = Dict{Union{String, Int}, Int}(row.node => idx for (idx, row) in enumerate(eachrow(nodes)))

incidences = data["incidences"]

for inc in incidences
edge_idx = edge_dict[inc["edge"]]
node_idx = node_dict[inc["node"]]

        # incidences without an explicit weight default to 1
        weight = get(inc, "weight", 1)

        hg[node_idx, edge_idx] = weight
    end
end

# Builds a DataFrame with one row per unique edge id (plus optional attributes).
function build_edges_dataframe(
data::Dict{String, Any},
)
edges = DataFrame(
;
edge=Union{String, Int}[],
attrs=Union{Nothing, Dict{String, Any}}[]
)

if !haskey(data, "edges")
return edges
end

seen = Set{Union{Int, String}}()

    for edge in data["edges"]
        # keep only the first occurrence of a duplicated edge id
        edge["edge"] ∈ seen && continue

        attrs = get(edge, "attrs", nothing)

        push!(edges, [edge["edge"], attrs])
        push!(seen, edge["edge"])
    end

edges
end

# Builds a DataFrame with one row per unique node id (plus optional attributes).
function build_nodes_dataframe(
data::Dict{String, Any},
)
nodes = DataFrame(
;
node=Union{String, Int}[],
attrs=Union{Nothing, Dict{String, Any}}[]
)

if !haskey(data, "nodes")
return nodes
end

seen = Set{Union{String, Int}}()

    for node in data["nodes"]
        # keep only the first occurrence of a duplicated node id
        node["node"] ∈ seen && continue

        attrs = get(node, "attrs", nothing)

        push!(nodes, [node["node"], attrs])
        push!(seen, node["node"])
    end

nodes
end


# Nodes and edges that appear only in the incidences list (with no entry of
# their own under "nodes"/"edges") are appended with empty metadata.
function add_nodes_and_edges_from_incidences!(
data::Dict{String, Any},
edges::DataFrame,
nodes::DataFrame,
)
edge_ids = Set{Union{String, Int}}(edges.edge)
node_ids = Set{Union{String, Int}}(nodes.node)
for incidence in data["incidences"]
node = incidence["node"]
edge = incidence["edge"]

if node ∉ node_ids
push!(nodes, [node, nothing])
push!(node_ids, node)
end

if edge ∉ edge_ids
push!(edges, [edge, nothing])
push!(edge_ids, edge)
end

end
end


"""
hg_save(io::IO, h::Hypergraph, format::HIF_Format)

Saves a hypergraph `h` to an output stream `io` in `HIF` format.

If `h` uses composite types for either vertex metadata or hyperedge metadata,
the user has to explicitly tell the JSON3 package about them, for instance using:

`JSON3.StructType(::Type{MyType}) = JSON3.Struct()`.

See the [JSON3.jl documentation](https://github.com/quinnj/JSON3.jl) for more details.

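A minimal sketch, assuming a hypothetical metadata type `MyMeta`:

    # hypothetical type used only for illustration
    struct MyMeta
        label::String
    end

    JSON3.StructType(::Type{MyMeta}) = JSON3.Struct()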
"""
function hg_save(io::IO, h::Hypergraph{T, V, E, D}, format::HIF_Format) where {T, V, E, D}
incidences = Vector{Dict{String, Union{String, Int, T}}}()
    for i in 1:nhv(h)
        for j in sort!(collect(keys(gethyperedges(h, i))))
            weight = h[i, j]
            # `i` indexes a vertex and `j` one of its incident hyperedges
            push!(incidences, Dict{String, Union{String, Int, T}}("edge" => j, "node" => i, "weight" => T(weight)))
        end
    end
json_hg = Dict{Symbol, typeof(incidences)}(:incidences => incidences)
JSON3.write(io, json_hg)
end
6 changes: 6 additions & 0 deletions test/data/HIF-standard/README.md
@@ -0,0 +1,6 @@
# HIF-standard

This directory contains files used to test the import and export of hypergraphs in the `HIF` format.

Read more about `HIF` here:
https://github.com/pszufe/HIF-standard
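
A minimal usage sketch against one of these fixtures (assuming it is run from the repository root):

```julia
using SimpleHypergraphs

# load a fixture shipped with the test suite (path assumes the repo root)
h = hg_load("test/data/HIF-standard/single_incidence.json", HIF_Format())

# round-trip: write the hypergraph back out in HIF format
open("out.json", "w") do io
    hg_save(io, h, HIF_Format())
end
```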
7 changes: 7 additions & 0 deletions test/data/HIF-standard/duplicated_nodes_edges.json
@@ -0,0 +1,7 @@
{
"network-type": "undirected",
"metadata": {},
"nodes": [{"node": "n1"}, {"node": "n1"}],
"edges": [{"edge": "e1"}, {"edge": "e1"}],
"incidences": [{"edge": "e1", "node": "n1"}, {"edge": "e1", "node": "n1"}]
}
7 changes: 7 additions & 0 deletions test/data/HIF-standard/empty_arrays.json
@@ -0,0 +1,7 @@
{
"network-type": "undirected",
"metadata": {},
"incidences": [],
"nodes": [],
"edges": []
}
3 changes: 3 additions & 0 deletions test/data/HIF-standard/empty_hypergraph.json
@@ -0,0 +1,3 @@
{
"incidences": []
}
15 changes: 15 additions & 0 deletions test/data/HIF-standard/metadata_with_deeply_nested_attributes.json
@@ -0,0 +1,15 @@
{
"network-type": "asc",
"metadata": {
"level1": {
"level2": {
"level3": {
"key": "value"
}
}
}
},
"incidences": [{"edge": 1, "node": 2}],
"nodes": [{"node": "n1", "attrs": {"nested_attr": {"key1": "value1"}}}],
"edges": [{"edge": "e1", "attrs": {"nested_attr": {"key2": "value2"}}}]
}
13 changes: 13 additions & 0 deletions test/data/HIF-standard/metadata_with_nested_attributes.json
@@ -0,0 +1,13 @@
{
"network-type": "asc",
"metadata": {
"creator": "nested_test",
"extra_info": {
"key1": "value1",
"key2": "value2"
}
},
"incidences": [{"edge": 10, "node": 20}],
"nodes": [{"node": 20, "attrs": {"color": "blue", "size": "large"}}],
"edges": [{"edge": 10, "attrs": {"priority": "high"}}]
}
5 changes: 5 additions & 0 deletions test/data/HIF-standard/missing_direction.json
@@ -0,0 +1,5 @@
{
"network-type": "directed",
"metadata": {},
"incidences": [{"edge": 1, "node": 2}]
}
8 changes: 8 additions & 0 deletions test/data/HIF-standard/single_incidence.json
@@ -0,0 +1,8 @@
{
"incidences": [
{
"edge": "abcd",
"node": 42
}
]
}
12 changes: 12 additions & 0 deletions test/data/HIF-standard/single_incidence_with_attrs.json
@@ -0,0 +1,12 @@
{
"incidences": [
{
"edge": "abcd",
"node": 42,
"attrs": {
"role": "PI",
"age": 42
}
}
]
}
9 changes: 9 additions & 0 deletions test/data/HIF-standard/single_incidence_with_weights.json
@@ -0,0 +1,9 @@
{
"incidences": [
{
"edge": "abcd",
"node": 42,
"weight": -2
}
]
}
5 changes: 5 additions & 0 deletions test/data/HIF-standard/valid_incidence_head.json
@@ -0,0 +1,5 @@
{
"network-type": "directed",
"metadata": {},
"incidences": [{"edge": 1, "node": 2, "direction": "head"}]
}
5 changes: 5 additions & 0 deletions test/data/HIF-standard/valid_incidence_tail.json
@@ -0,0 +1,5 @@
{
"network-type": "directed",
"metadata": {},
"incidences": [{"edge": 1, "node": 2, "direction": "tail"}]
}