@@ -86,15 +86,16 @@ The generic tensor network approach for solving MIS works best for graphs with s
```julia
julia> using GraphTensorNetworks
- julia> gp = Independence(SimpleGraph(res.grid_graph); optimizer=TreeSA(ntrials=1, niters=10), simplifier=MergeGreedy());
+ julia> gp = IndependentSet(SimpleGraph(res.grid_graph); optimizer=TreeSA(ntrials=1, niters=10), simplifier=MergeGreedy());
- julia> misconfig = solve(gp, "config max")[].c;
+ julia> misconfig = solve(gp, SingleConfigMax())[].c.data
+ 10110001000110000111000001010101011000001111000001101010101010000101110100000010010101010101010001000000100111010000001001101000101010001110010001000101110100111010100010110100100110101010110100011100101010101010100011
# create a grid mask as the solution, where occupied locations are marked as value 1.
julia> c = zeros(Int, size(res.grid_graph.content));
julia> for (i, loc) in enumerate(findall(!isempty, res.grid_graph.content))
-            c[loc] = misconfig.data[i]
+            c[loc] = misconfig[i]
       end
julia> print_config(res, c)
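As a quick sanity check, one can also compare the MIS size of the mapped grid graph with that of the source graph. The sketch below assumes that `res.mis_overhead` records the MIS-size overhead introduced by the mapping (the weighted example below uses `w_res.mis_overhead` in the same way) and uses the `SizeMax` solution property:

```julia
julia> mapped_size = solve(gp, SizeMax())[].n;   # MIS size of the mapped grid graph

julia> @assert mapped_size - res.mis_overhead == 4   # 4 = size of the source-graph MIS found below
```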
@@ -131,7 +132,7 @@ julia> print_config(res, c)
#### Step 3: map the MIS solution back to an MIS of the source graph
```julia
- julia> original_configs = map_configs_back(res, [c])
+ julia> original_configs = map_configs_back(res, [misconfig])
1-element Vector{Vector{Int64}}:
[1, 0, 0, 1, 0, 0, 1, 1, 0, 0]
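One can also verify with Graphs.jl that the mapped-back configuration is an independent set of the source graph `g`; the following is a sketch using the `original_configs` computed above:

```julia
julia> using Graphs

julia> cfg = original_configs[1];

julia> @assert all(cfg[src(e)] + cfg[dst(e)] <= 1 for e in edges(g))   # no edge has both endpoints selected
```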
@@ -150,31 +151,74 @@ julia> w_res = map_graph(Weighted(), g, vertex_order=Branching());
julia> println(w_res.grid_graph)
⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ● ● ● ● ● ● ● ● ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ⋅ ● ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ● ● ● ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ⋅ ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ○ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ⋅ ● ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ● ● ● ⋅ ● ● ● ● ● ● ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ○ ⋅ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ○ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ▴ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ● ● ● ● ● ● ● ● ● ● ⋅ ● ● ● ● ● ● ○ ⋅ ● ⋅ ⋅ ● ⋅ ● ○ ⋅ ● ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ○ ⋅ ⋅ ● ● ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ○ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ⋅ ● ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ○ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ● ○ ⋅ ● ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ● ⋅ ● ● ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ○ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ○ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ● ⋅ ● ● ● ● ● ● ● ● ● ● ○ ⋅ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ● ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
- ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
+ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅ ⋅
+ ```
+
+ Here, `▴` is a vertex with weight 3, `●` is a vertex with weight 2, and `○` is a vertex with weight 1.
+ One can assign weights in the range 0 to 1 by typing
+
+ ```julia
+ julia> source_weights = 0.05:0.1:0.95
+ 0.05:0.1:0.95
+
+ julia> mapped_weights = map_weights(w_res, source_weights)
+ 218-element Vector{Float64}:
+ 1.85
+ 2.0
+ 1.95
+ 2.0
+ 2.0
+ 2.0
+ 1.0
+ ⋮
+ 2.0
+ 2.0
+ 2.0
+ 2.0
+ 1.0
+ 2.0
+ ```
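The mapped weight vector has one entry per vertex of the mapped grid graph; a quick consistency check (a sketch, with `nv` from Graphs.jl) is:

```julia
julia> @assert length(mapped_weights) == nv(SimpleGraph(w_res.grid_graph))   # 218 vertices in the mapped graph
```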
+
+ One can easily check the correctness:
+ ```julia
+ julia> wr1 = solve(IndependentSet(g; weights=collect(source_weights)), SingleConfigMax())[]
+ (2.2, ConfigSampler{10, 1, 1}(0100100110))ₜ
+
+ julia> wr2 = solve(IndependentSet(SimpleGraph(w_res.grid_graph); weights=mapped_weights), SingleConfigMax())[]
+ (178.2 , ConfigSampler {218, 1, 4} (10001110100000111000110101000100010110010010110010010001010101100010011001010000101000100010101010010100001001101100000110010001010101010001100000110110010010111011000001111000010110101011010010101010101010101010100101 ))ₜ
+
+ julia> wr2.n - w_res.mis_overhead
+ 2.1999999999999886
+
+ julia> map_configs_back(w_res, [wr2.c.data])
+ 1-element Vector{Vector{Int64}}:
+ [0, 1, 0, 0, 1, 0, 0, 1, 1, 0]
```
+
+ Yep! We get exactly the same ground state.
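As a further sketch of a check, reusing `wr1`, `wr2` and `source_weights` from above, the total weight of the mapped-back configuration agrees with the weighted MIS value computed directly on the source graph:

```julia
julia> cfg = map_configs_back(w_res, [wr2.c.data])[1];

julia> @assert sum(collect(source_weights)[findall(==(1), cfg)]) ≈ wr1.n   # both evaluate to 2.2
```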