diff --git a/attic/DataBlobs/FileDataEntryBlob.jl b/attic/DataBlobs/FileDataEntryBlob.jl deleted file mode 100644 index 25ae2580..00000000 --- a/attic/DataBlobs/FileDataEntryBlob.jl +++ /dev/null @@ -1,87 +0,0 @@ - -# @generated function ==(x::BlobEntry, y::BlobEntry) -# mapreduce(n -> :(x.$n == y.$n), (a,b)->:($a && $b), fieldnames(x)) -# end - -# -# getHash(entry::AbstractBlobEntry) = hex2bytes(entry.hash) - -##============================================================================== -## BlobEntry Common -##============================================================================== -blobfilename(entry::BlobEntry) = joinpath(entry.folder, "$(entry.id).dat") -entryfilename(entry::BlobEntry) = joinpath(entry.folder, "$(entry.id).json") - -##============================================================================== -## BlobEntry Blob CRUD -##============================================================================== - -function getBlob(dfg::AbstractDFG, entry::BlobEntry) - if isfile(blobfilename(entry)) - open(blobfilename(entry)) do f - return read(f) - end - else - throw(KeyError("Could not find file '$(blobfilename(entry))'.")) - # return nothing - end -end - -function addBlob!(dfg::AbstractDFG, entry::BlobEntry, data::Vector{UInt8}) - if isfile(blobfilename(entry)) - error("Key '$(entry.id)' blob already exists.") - elseif isfile(entryfilename(entry)) - error("Key '$(entry.id)' entry already exists, but no blob.") - else - open(blobfilename(entry), "w") do f - return write(f, data) - end - open(entryfilename(entry), "w") do f - return JSON.print(f, entry) - end - # FIXME update for entry.blobId vs entry.originId - return UUID(entry.id) - # return getBlob(dfg, entry)::Vector{UInt8} - end -end - -function updateBlob!(dfg::AbstractDFG, entry::BlobEntry, data::Vector{UInt8}) - if !isfile(blobfilename(entry)) - @warn "Entry '$(entry.id)' does not exist, adding." - return addBlob!(dfg, entry, data) - else - # perhaps add an explicit force update flag and error otherwise - @warn "Key '$(entry.id)' already exists, data will be overwritten." - deleteBlob!(dfg, entry) - return addBlob!(dfg, entry, data) - end -end - -function deleteBlob!(dfg::AbstractDFG, entry::BlobEntry) - data = getBlob(dfg, entry) - rm(blobfilename(entry)) - rm(entryfilename(entry)) - return data -end - -##============================================================================== -## BlobEntry CRUD Helpers -##============================================================================== - -function addData!( - ::Type{BlobEntry}, - dfg::AbstractDFG, - label::Symbol, - key::Symbol, - folder::String, - blob::Vector{UInt8}, - timestamp = now(localzone()); - id::UUID = uuid4(), - hashfunction = sha256, -) - fde = BlobEntry(key, id, folder, bytes2hex(hashfunction(blob)), timestamp) - blobId = addBlob!(dfg, fde, blob) |> UUID - newEntry = BlobEntry(fde; id = blobId, blobId) - de = addBlobEntry!(dfg, label, newEntry) - return de # de=>db -end diff --git a/attic/README.md b/attic/README.md deleted file mode 100644 index 86f5803c..00000000 --- a/attic/README.md +++ /dev/null @@ -1,9 +0,0 @@ -## Deprecated Drivers - -GraphsDFG is dependent on the archived Graphs.jl and no longer maintained from DFG v0.8. -See https://github.com/JuliaRobotics/DistributedFactorGraphs.jl/issues/398 for more detail. - -MetaGraphsDFG and SymbolDFG are unlikely to be revived again. - -The old Graphs (git tags v0.2.5 through v0.10.3) is now replaced by LightGraphs v1.3.5 and is functionally equivalent. 
-The new GraphsDFG is therefore an exact replacement of LightDFG to follow the transition of LightGraphs to Graphs \ No newline at end of file diff --git a/attic/SerializationOld.jl b/attic/SerializationOld.jl deleted file mode 100644 index d8486f36..00000000 --- a/attic/SerializationOld.jl +++ /dev/null @@ -1,777 +0,0 @@ - -# TODO dev and debugging, used by some of the DFG drivers -export _packSolverData - -# For all types that pack their type into their own structure (e.g. PPE) -const TYPEKEY = "_type" - -## Custom serialization -using JSON -import JSON.show_json -import JSON.Writer: StructuralContext, JSONContext, show_json -import JSON.Serializations: CommonSerialization, StandardSerialization -function JSON.show_json(io::JSONContext, serialization::CommonSerialization, uuid::UUID) - return print(io.io, "\"$uuid\"") -end - -## Version checking -# FIXME return VersionNumber -function _getDFGVersion() - if haskey(Pkg.dependencies(), Base.UUID("b5cc3c7e-6572-11e9-2517-99fb8daf2f04")) - return string( - Pkg.dependencies()[Base.UUID("b5cc3c7e-6572-11e9-2517-99fb8daf2f04")].version, - ) |> VersionNumber - else - # This is arguably slower, but needed for Travis. - return Pkg.TOML.parse( - read(joinpath(dirname(pathof(@__MODULE__)), "..", "Project.toml"), String), - )["version"] |> VersionNumber - end -end - -function _versionCheck(props::Dict{String, Any}) - if haskey(props, "_version") - if VersionNumber(props["_version"]) < _getDFGVersion() - @warn "This data was serialized using DFG $(props["_version"]) but you have $(_getDFGVersion()) installed, there may be deserialization issues." maxlog = - 10 - end - else - error( - "There isn't a version tag in this data so it's older than v0.10, deserialization expected to fail.", - ) - end -end - -## Utility functions for ZonedDateTime - -# Regex parser that converts clauses like ":59.82-" to well formatted ":59.820-" -function _fixSubseconds(a) - length(a) == 4 && return a[1:3] * ".000" * a[4] - frac = a[5:(length(a) - 1)] - frac = length(frac) > 3 ? frac[1:3] : frac * '0'^(3 - length(frac)) - return a[1:4] * frac * a[length(a)] -end - -function getStandardZDTString(stringTimestamp::String) - # Additional check+fix for the ultraweird "2020-08-12T12:00Z" - ts = replace(stringTimestamp, r"T(\d\d):(\d\d)(Z|z|\+|-)" => s"T\1:\2:00.000\3") - - # This is finding :59Z or :59.82-05:00 and fixing it to always have 3 subsecond digits. - # Temporary fix until TimeZones.jl gets an upstream fix. 
- return replace(ts, r":\d\d(\.\d+)?(Z|z|\+|-)" => _fixSubseconds) - # if occursin(r".*\:\d\d\+", packedProps["timestamp"]) - # ZonedDateTime(packedProps["timestamp"], "yyyy-mm-ddTHH:MM:SSzzzz") - # elseif occursin(r".*\:\d\d\.", packedProps["timestamp"]) - # ZonedDateTime(packedProps["timestamp"], "yyyy-mm-ddTHH:MM:SS.sss+zzzz") - # else - # ZonedDateTime(packedProps["timestamp"]) - # end -end - -# Corrects any `::ZonedDateTime` fields of T in corresponding `interm::Dict` as `dateformat"yyyy-mm-ddTHH:MM:SS.ssszzz"` -function standardizeZDTStrings!(T, interm::Dict) - for (name, typ) in zip(fieldnames(T), T.types) - if typ <: ZonedDateTime && haskey(interm, name) - namestr = string(name) - interm[namestr] = getStandardZDTString(interm[namestr]) - end - end - return nothing -end - -# variableType module.type string functions -function typeModuleName(variableType::InferenceVariable) - io = IOBuffer() - ioc = IOContext(io, :module => DistributedFactorGraphs) - show(ioc, typeof(variableType)) - return String(take!(io)) -end - -typeModuleName(varT::Type{<:InferenceVariable}) = typeModuleName(varT()) - -""" - $(SIGNATURES) -Get a type from the serialization module. -""" -function getTypeFromSerializationModule(_typeString::AbstractString) - @debug "DFG converting type string to Julia type" _typeString - try - # split the type at last `.` - split_st = split(_typeString, r"\.(?!.*\.)") - #if module is specified look for the module in main, otherwise use Main - if length(split_st) == 2 - m = getfield(Main, Symbol(split_st[1])) - else - m = Main - end - noparams = split(split_st[end], r"{") - ret = if 1 < length(noparams) - # fix #671, but does not work with specific module yet - bidx = findfirst(r"{", split_st[end])[1] - Core.eval(m, Base.Meta.parse("$(noparams[1])$(split_st[end][bidx:end])")) - # eval(Base.Meta.parse("Main.$(noparams[1])$(split_st[end][bidx:end])")) - else - getfield(m, Symbol(split_st[end])) - end - - return ret - - catch ex - @error "Unable to deserialize type $(_typeString)" - io = IOBuffer() - showerror(io, ex, catch_backtrace()) - err = String(take!(io)) - @error(err) - end - return nothing -end - -## ============================================================================= -## Transcoding / Unmarshal types helper -## ============================================================================= - -""" - $SIGNATURES -Should be a highly reusable function for any transcoding of intermediate type (or dict) to a desired output type. - -Notes: -- Using Base.@kwdef and JSON3.jl probably has better conversion logic than this function. -- This function was written to reduce dependency on Unmarshal.jl which was becoming stale. - -DevNotes -- See if this function just be deprecated to use JSON3 or similar. -- Do better with Union{Nothing, T} types (if this function is not replaced by JSON3) - -examples -```julia -Base.@kwdef struct HardType - name::String - time::DateTime = now(UTC) - val::Float64 = 0.0 -end - -# slight human overhead for each type to ignore extraneous field construction -# TODO, devnote drop this requirement with filter of _names in transcodeType -HardType(; - name::String, - time::DateTime = now(UTC), - val::Float64 = 0.0, - ignorekws... 
-) = HardType(name,time,val) - -# somehow one gets an intermediate type -imt = IntermediateType( - v"1.0", - "NotUsedYet", - "test", - now(UTC), - 1.0 -) -# or dict (testing string keys) -imd = Dict( - "_version" => v"1.0", - "_type" => "NotUsedYet", - "name" => "test", - "time" => now(UTC), - "val" => 1.0 -) -# ordered dict (testing symbol keys) -iod = OrderedDict( - :_version => v"1.0", - :_type => "NotUsedYet", - :name => "test", - :time => now(UTC), - # :val => 1.0 -) - -# do the transcoding to a slighly different hard type -T1 = transcodeType(HardType, imt) -T2 = transcodeType(HardType, imd) -T3 = transcodeType(HardType, iod) -``` -""" -function transcodeType(::Type{T}, inObj) where {T} - # - # specializations as inner functions (don't have to be inners) - # these few special cases came up with examples below, note recursions - _instance(S::Type, x) = S(x) - _instance(S::Type{Union{Nothing, UUID}}, x::String) = UUID(x) # special case - _instance(_::Type{S}, x::S) where {S} = x # if ambiguous, delete and do alternative `_instance(S::Type, x) = S===Any ? x : S(x)` - _instance(S::Type{I}, x::AbstractString) where {I <: Number} = Base.parse(I, x) - function _instance(S::Type{E}, x::AbstractVector) where {E <: AbstractVector} - return _instance.(eltype(E), x) - end - function _instance(S::Type{<:AbstractDict{K, V}}, x::AbstractDict) where {K, V} - return (tup = (Symbol.(keys(x)) .=> _instance.(V, values(x))); S(tup...)) - end - - # what the struct wants - _types = fieldtypes(T) - _names = fieldnames(T) - # (closure) resolve random ordering problem - _getIdx(s::Symbol) = findfirst(x -> x == s, _names) - # (closure) create an instance of a field - makething(k::Symbol, v) = begin - idx = _getIdx(k) - if !isnothing(idx) - # this field is in the output type and should be included - k => _instance(_types[_getIdx(k)], v) - else - # this field should be ignored in the output type - Symbol(:VOID_, rand(1:1000000)) => nothing - end - end - # zip together keys/fields and values for either dict or intermediate type - _srckeys(s::AbstractDict) = keys(s) - _srckeys(s) = fieldnames(typeof(s)) - _srcvals(s::AbstractDict) = values(s) - _srcvals(s) = map(k -> getproperty(s, k), _srckeys(s)) - # NOTE, improvement, filter extraneous fields not in _names - arr = [makething(Symbol(k), v) for (k, v) in zip(_srckeys(inObj), _srcvals(inObj))] - filter!(s -> s[1] in _names, arr) - # create dict provided fields into a NamedTuple as a type stable "pre-struct" - nt = (; arr...) - # use keyword constructors provided by Base.@kwdef to resolve random ordering, incomplete dicts, and defaults - return T(; nt...) 
-end - -##============================================================================== -## Variable Packing and unpacking -##============================================================================== -function packVariable(v::VariableCompute) - props = Dict{String, Any}() - props["label"] = string(v.label) - props["timestamp"] = v.timestamp - props["nstime"] = v.nstime.value - props["tags"] = v.tags - props["ppeDict"] = v.ppeDict - props["solverDataDict"] = (Dict( - keys(v.solverDataDict) .=> - map(vnd -> packVariableNodeData(vnd), values(v.solverDataDict)), - )) - props["metadata"] = v.smallData - props["solvable"] = v.solvable - props["variableType"] = typeModuleName(getVariableType(v)) - props["dataEntry"] = (Dict(keys(v.dataDict) .=> values(v.dataDict))) # map(bde -> JSON.json(bde), values(v.dataDict)))) - props["dataEntryType"] = - (Dict(keys(v.dataDict) .=> map(bde -> typeof(bde), values(v.dataDict)))) - props["_version"] = string(_getDFGVersion()) - return props #::Dict{String, Any} -end - -""" -$(SIGNATURES) - -Common unpack a Dict{String, Any} into a PPE. -""" -function _unpackPPE( - packedPPE::Dict{String, Any}; - _type = pop!(packedPPE, "_type"), # required for generic use -) - # !haskey(packedPPE, "_type") && error("Cannot find type key '_type' in packed PPE data") - if (_type === nothing || _type == "") - @warn "Cannot deserialize PPE, unknown type key, trying DistributedFactorGraphs.MeanMaxPPE" _type - _type = "DistributedFactorGraphs.MeanMaxPPE" - end - ppeType = getTypeFromSerializationModule(_type) - - pee = transcodeType.(ppeType, packedPPE) # from Dict to hard type - # ppe = Unmarshal.unmarshal( - # ppeType, - # packedPPE - # ) - - return ppe -end - -""" - $SIGNATURES -Returns a VariableCompute. - -DevNotes -- v0.19 packVariable fixed nested JSON bug on these fields, see #867: - - `tags`, `ppeDict`, `solverDataDict`, `smallData`, `dataEntry`, `dataEntryType` -""" -function unpackVariable( - dfg::AbstractDFG, - packedProps::Dict{String, Any}; - unpackPPEs::Bool = true, - unpackSolverData::Bool = true, - unpackBigData::Bool = haskey(packedProps, "dataEntryType") && - haskey(packedProps, "dataEntry"), - skipVersionCheck::Bool = false, -) - # - @debug "Unpacking variable:\r\n$packedProps" - # Version checking. 
- !skipVersionCheck && _versionCheck(packedProps) - id = if haskey(packedProps, "id") - UUID(packedProps["id"]) - else - nothing - end - label = Symbol(packedProps["label"]) - # Make sure that the timestamp is correctly formatted with subseconds - packedProps["timestamp"] = getStandardZDTString(packedProps["timestamp"]) - # Parse it - timestamp = ZonedDateTime(packedProps["timestamp"]) - nstime = Nanosecond(get(packedProps, "nstime", 0)) - - # FIXME, drop nested packing, see DFG #867 - # string serialization using packVariable and CGDFG serialization (Vector{String}) - tags_ = if packedProps["tags"] isa String - Symbol.(JSON2.read(packedProps["tags"], Vector{String})) - else - Symbol.(packedProps["tags"]) - end - - # FIXME, drop nested packing, see DFG #867 - ppeDict = if unpackPPEs && haskey(packedProps, "ppesDict") - JSON2.read(packedProps["ppeDict"], Dict{Symbol, MeanMaxPPE}) - elseif unpackPPEs && - haskey(packedProps, "ppes") && - packedProps["ppes"] isa AbstractVector - # these different cases are not well covered in tests, but first fix #867 - # TODO dont hardcode the ppeType (which is already discovered for each entry in _updatePPE) - ppedict = Dict{Symbol, MeanMaxPPE}() - for pd in packedProps["ppes"] - _type = get(pd, "_type", "DistributedFactorGraphs.MeanMaxPPE") - ppedict[Symbol(pd["solveKey"])] = _unpackPPE(pd; _type) - end - ppedict - else - Dict{Symbol, MeanMaxPPE}() - end - - smallData = if haskey(packedProps, "metadata") - if packedProps["metadata"] isa String - JSON2.read(packedProps["metadata"], Dict{Symbol, SmallDataTypes}) - elseif packedProps["metadata"] isa Dict - Dict{Symbol, SmallDataTypes}( - Symbol.(keys(packedProps["metadata"])) .=> - values(packedProps["metadata"]), - ) - # packedProps["metadata"] - else - @warn "unknown metadata deserialization on $label, type $(typeof(packedProps["metadata"]))" maxlog = - 10 - Dict{Symbol, SmallDataTypes}() - end - else - Dict{Symbol, SmallDataTypes}() - end - - variableTypeString = packedProps["variableType"] - - variableType = getTypeFromSerializationModule(variableTypeString) - isnothing(variableType) && - error("Cannot deserialize variableType '$variableTypeString' in variable '$label'") - pointType = getPointType(variableType) - - function _unpackSolverData(packedSolverData; oldkeys = false) - _ensureid!(s::Dict) = begin - s["id"] = haskey(s, "id") ? s["id"] : nothing - end - _ensureid!(s::PackedVariableNodeData) = s - packed = if packedSolverData isa String - # JSON2.read(packedSolverData, Dict{String, PackedVariableNodeData}) - # JSON3.read(packedSolverData, Dict{String, PackedVariableNodeData}) - jdc = JSON.parse(packedSolverData) - jpvd = Dict{String, PackedVariableNodeData}() - for (k, v) in jdc - _ensureid!(v) - jpvd[k] = transcodeType(PackedVariableNodeData, v) - end - jpvd - else - packedSolverData - end - packedvals = values(packed) - _ensureid!.(packedvals) - # @show keys(packed) - # TODO deprecate, this is for DFG18 compat only - packed_ = transcodeType.(PackedVariableNodeData, packedvals) # from Dict to hard type - unpacked_ = map(p -> unpackVariableNodeData(p), packed_) - keys_ = oldkeys ? 
Symbol.(keys(packed_)) : map(s -> s.solveKey, unpacked_) - return Dict{Symbol, VariableNodeData{variableType, pointType}}(keys_ .=> unpacked_) - end - - # FIXME, drop nested packing, see DFG #867 - solverData = if unpackSolverData && haskey(packedProps, "solverDataDict") - _unpackSolverData(packedProps["solverDataDict"]; oldkeys = false) - elseif unpackSolverData && haskey(packedProps, "solverData") - _unpackSolverData(packedProps["solverData"]) - else - Dict{Symbol, VariableNodeData{variableType, pointType}}() - end - # Rebuild VariableCompute using the first solver variableType in solverData - # @info "dbg Serialization 171" variableType Symbol(packedProps["label"]) timestamp nstime ppeDict solverData smallData Dict{Symbol,AbstractBlobEntry}() Ref(packedProps["solvable"]) - # variable = VariableCompute{variableType}(Symbol(packedProps["label"]), timestamp, nstime, Set(tags), ppeDict, solverData, smallData, Dict{Symbol,AbstractBlobEntry}(), Ref(packedProps["solvable"])) - - variable = VariableCompute{variableType}(; - id, - label = Symbol(packedProps["label"]), - # variableType = variableType, - timestamp, - nstime, - tags = Set{Symbol}(tags_), - ppeDict, - solverDataDict = solverData, - smallData = smallData, - dataDict = Dict{Symbol, AbstractBlobEntry}(), - solvable = Ref(Int(packedProps["solvable"])), - ) - # - - # Now rehydrate complete DataEntry type. - if unpackBigData - #TODO Deprecate - for backward compatibility between v0.8 and v0.9, remove in v0.10 - dataElemTypes = if packedProps["dataEntryType"] isa String - JSON2.read(packedProps["dataEntryType"], Dict{Symbol, String}) - else - # packedProps["dataEntryType"] - Dict{Symbol, String}( - Symbol.(keys(packedProps["dataEntryType"])) .=> - values(packedProps["dataEntryType"]), - ) - end - for (k, name) in dataElemTypes - val = split(string(name), '.')[end] - dataElemTypes[k] = val - end - - dataIntermed = if packedProps["dataEntry"] isa String - JSON2.read(packedProps["dataEntry"], Dict{Symbol, String}) - elseif packedProps["dataEntry"] isa NamedTuple - # case where JSON2 did unpacking of all fields as hard types (no longer String) - # Dict{Symbol, String}( Symbol.(keys(packedProps["dataEntry"])) .=> values(packedProps["dataEntry"]) ) - for i = 1:length(packedProps["dataEntry"]) - k = keys(packedProps["dataEntry"])[i] - bdeInter = values(packedProps["dataEntry"])[i] - objType = getfield(DistributedFactorGraphs, Symbol(dataElemTypes[k])) - # standardizeZDTStrings!(objType, bdeInter) - # fullVal = Unmarshal.unmarshal(objType, bdeInter) - variable.dataDict[k] = objType(; bdeInter...) 
- end - # forcefully skip, since variabe.dataDict already populated here - Dict{Symbol, String}() - else - Dict(Symbol.(keys(packedProps["dataEntry"])) .=> - values(packedProps["dataEntry"])) - end - - _doparse(s) = s - _doparse(s::String) = JSON.parse(s) - - for (k, bdeInter) in dataIntermed - interm = _doparse(bdeInter) # JSON.parse(bdeInter) # bdeInter - objType = getfield(DistributedFactorGraphs, Symbol(dataElemTypes[k])) - standardizeZDTStrings!(objType, interm) - fullVal = transcodeType(objType, interm) - # fullVal = Unmarshal.unmarshal(objType, interm) - variable.dataDict[k] = fullVal - end - end - - return variable -end - -# returns a PackedVariableNodeData -function packVariableNodeData(d::VariableNodeData{T}) where {T <: InferenceVariable} - @debug "Dispatching conversion variable -> packed variable for type $(string(d.variableType))" - castval = if 0 < length(d.val) - precast = getCoordinates.(T, d.val) - @cast castval[i, j] := precast[j][i] - castval - else - zeros(1, 0) - end - _val = castval[:] - return PackedVariableNodeData( - d.id, - _val, - size(castval, 1), - d.bw[:], - size(d.bw, 1), - d.BayesNetOutVertIDs, - d.dimIDs, - d.dims, - d.eliminated, - d.BayesNetVertID, - d.separator, - typeModuleName(d.variableType), - d.initialized, - d.infoPerCoord, - d.ismargin, - d.dontmargin, - d.solveInProgress, - d.solvedCount, - d.solveKey, - string(_getDFGVersion()), - ) -end - -function unpackVariableNodeData(d::PackedVariableNodeData) - @debug "Dispatching conversion packed variable -> variable for type $(string(d.variableType))" - # Figuring out the variableType - # TODO deprecated remove in v0.11 - for backward compatibility for saved variableTypes. - ststring = string(split(d.variableType, "(")[1]) - T = getTypeFromSerializationModule(ststring) - isnothing(T) && error( - "The variable doesn't seem to have a variableType. It needs to set up with an InferenceVariable from IIF. This will happen if you use DFG to add serialized variables directly and try use them. Please use IncrementalInference.addVariable().", - ) - - r3 = d.dimval - c3 = r3 > 0 ? floor(Int, length(d.vecval) / r3) : 0 - M3 = reshape(d.vecval, r3, c3) - @cast val_[j][i] := M3[i, j] - vals = Vector{getPointType(T)}(undef, length(val_)) - # vals = getPoint.(T, val_) - for (i, v) in enumerate(val_) - vals[i] = getPoint(T, v) - end - - r4 = d.dimbw - c4 = r4 > 0 ? 
floor(Int, length(d.vecbw) / r4) : 0 - BW = reshape(d.vecbw, r4, c4) - - # - return VariableNodeData{T, getPointType(T)}( - d.id, - vals, - BW, - Symbol.(d.BayesNetOutVertIDs), - d.dimIDs, - d.dims, - d.eliminated, - Symbol(d.BayesNetVertID), - Symbol.(d.separator), - T(), - d.initialized, - d.infoPerCoord, - d.ismargin, - d.dontmargin, - d.solveInProgress, - d.solvedCount, - Symbol(d.solveKey), - Dict{Symbol, Threads.Condition}(), - ) -end - -##============================================================================== -## Factor Packing and unpacking -##============================================================================== - -function _packSolverData(f::FactorCompute, fnctype::AbstractFactor) - # - packtype = convertPackedType(fnctype) - try - packed = convert(PackedFunctionNodeData{packtype}, getSolverData(f)) - packedJson = packed - return packedJson - catch ex - io = IOBuffer() - showerror(io, ex, catch_backtrace()) - err = String(take!(io)) - msg = "Error while packing '$(f.label)' as '$fnctype', please check the unpacking/packing converters for this factor - \r\n$err" - error(msg) - end -end - -# returns ::Dict{String, <:Any} -function packFactor(dfg::AbstractDFG, f::FactorCompute) - # Construct the properties to save - props = Dict{String, Any}() - props["id"] = f.id !== nothing ? string(f.id) : nothing - props["label"] = string(f.label) - props["timestamp"] = string(f.timestamp) - props["nstime"] = f.nstime.value - props["tags"] = f.tags - props["metadata"] = f.smallData - # Pack the node data - fnctype = getSolverData(f).fnc.usrfnc! - props["data"] = _packSolverData(f, fnctype) - # Include the type - props["fnctype"] = String(_getname(fnctype)) - props["_variableOrderSymbols"] = f._variableOrderSymbols # JSON2.write(f._variableOrderSymbols) - props["solvable"] = getSolvable(f) - props["_version"] = string(_getDFGVersion()) - return props -end - -function reconstFactorData() end - -function decodePackedType( - dfg::AbstractDFG, - varOrder::AbstractVector{Symbol}, - ::Type{T}, - packeddata::GenericFunctionNodeData{PT}, -) where {T <: FactorOperationalMemory, PT} - # - # TODO, to solve IIF 1424 - # variables = map(lb->getVariable(dfg, lb), varOrder) - - # Also look at parentmodule - usrtyp = convertStructType(PT) - fulltype = DFG.FunctionNodeData{T{usrtyp}} - factordata = reconstFactorData(dfg, varOrder, fulltype, packeddata) - return factordata -end - -# function Base.convert(::Type{PF}, nt::NamedTuple) where {PF <: AbstractPackedFactor} -# # Here we define a convention, must provide PackedType(;kw...) constructor, easiest is just use Base.@kwdef -# PF(;nt...) -# end - -# function Base.convert(::Type{GenericFunctionNodeData{P}}, nt::NamedTuple) where P -# GenericFunctionNodeData{P}( -# nt.eliminated, -# nt.potentialused, -# nt.edgeIDs, -# convert(P,nt.fnc), -# nt.multihypo, -# nt.certainhypo, -# nt.nullhypo, -# nt.solveInProgress, -# nt.inflation, -# ) -# end - -# TODO: REFACTOR THIS AS A JSON3 STRUCT DESERIALIZER. -function fncStringToData( - packtype::Type{<:AbstractPackedFactor}, - data::Union{String, <:NamedTuple}, -) - # Convert string to Named Tuples for kwargs - fncData = data isa AbstractString ? 
JSON2.read(data) : data - - packed = GenericFunctionNodeData{packtype}( - fncData.eliminated, - fncData.potentialused, - fncData.edgeIDs, - # NamedTuple args become kwargs with the splat - packtype(; fncData.fnc...), - fncData.multihypo, - fncData.certainhypo, - fncData.nullhypo, - fncData.solveInProgress, - fncData.inflation, - ) - return packed -end -function fncStringToData( - ::Type{T}, - data::PackedFunctionNodeData{T}, -) where {T <: AbstractPackedFactor} - return data -end -function fncStringToData( - fncType::String, - data::PackedFunctionNodeData{T}, -) where {T <: AbstractPackedFactor} - packtype = DFG.getTypeFromSerializationModule("Packed" * fncType) - if packtype == T - data - else - error( - "Unknown type conversion\n$(fncType)\n$packtype\n$(PackedFunctionNodeData{T})", - ) - end -end - -function fncStringToData(fncType::String, data::T) where {T <: AbstractPackedFactor} - packtype = DFG.getTypeFromSerializationModule("Packed" * fncType) - if packtype == T # || T <: packtype - data - else - fncStringToData(packtype, data) - end -end -function fncStringToData(fncType::String, data::Union{String, <:NamedTuple}) - packtype = DFG.getTypeFromSerializationModule("Packed" * fncType) - return fncStringToData(packtype, data) -end - -# Returns `::FactorCompute` -function unpackFactor( - dfg::G, - packedProps::Dict{String, Any}; - skipVersionCheck::Bool = false, -) where {G <: AbstractDFG} - # Version checking. - !skipVersionCheck && _versionCheck(packedProps) - - id = if haskey(packedProps, "id") && packedProps["id"] !== nothing - UUID(packedProps["id"]) - else - nothing - end - label = packedProps["label"] - - # various formats in which the timestamp might be stored - packedProps["timestamp"] = getStandardZDTString(packedProps["timestamp"]) - timestamp = ZonedDateTime(packedProps["timestamp"]) - nstime = Nanosecond(get(packedProps, "nstime", 0)) - - # Get the stored tags and variable order - @assert !(packedProps["tags"] isa String) "unpackFactor expecting JSON only data, packed `tags` should be a vector of strings (not a single string of elements)." - @assert !(packedProps["_variableOrderSymbols"] isa String) "unpackFactor expecting JSON only data, packed `_variableOrderSymbols` should be a vector of strings (not a single string of elements)." 
- tags = Symbol.(packedProps["tags"]) - _variableOrderSymbols = Symbol.(packedProps["_variableOrderSymbols"]) - - data = packedProps["data"] - if (data isa AbstractString) - data = JSON2.read(data) - end - datatype = packedProps["fnctype"] - @debug "DECODING factor type = '$(datatype)' for factor '$label'" - # packtype = getTypeFromSerializationModule("Packed"*datatype) - - # FIXME type instability from nothing to T - packed = nothing - fullFactorData = nothing - - try - packed = fncStringToData(datatype, data) #convert(GenericFunctionNodeData{packtype}, data) - decodeType = getFactorOperationalMemoryType(dfg) - fullFactorData = decodePackedType(dfg, _variableOrderSymbols, decodeType, packed) - catch ex - io = IOBuffer() - showerror(io, ex, catch_backtrace()) - err = String(take!(io)) - msg = "Error while unpacking '$label' as '$datatype', please check the unpacking/packing converters for this factor - \r\n$err" - error(msg) - end - - solvable = packedProps["solvable"] - - smallData = if haskey(packedProps, "metadata") - Dict{Symbol, SmallDataTypes}( - Symbol.(keys(packedProps["metadata"])) .=> values(packedProps["metadata"]), - ) - else - Dict{Symbol, SmallDataTypes}() - end - - # Rebuild FactorCompute - #TODO use constuctor to create factor - factor = FactorCompute( - Symbol(label), - timestamp, - nstime, - Set(tags), - fullFactorData, - solvable, - Tuple(_variableOrderSymbols); - id = id, - smallData = smallData, - ) - # - - # Note, once inserted, you still need to call rebuildFactorMetadata! - return factor -end - -##============================================================================== -## Serialization -##============================================================================== diff --git a/attic/tests/testTranscodeTypeUnmarshaling.jl b/attic/tests/testTranscodeTypeUnmarshaling.jl deleted file mode 100644 index f8d7965a..00000000 --- a/attic/tests/testTranscodeTypeUnmarshaling.jl +++ /dev/null @@ -1,101 +0,0 @@ - -# test transcoding and unmarshal util - -using Test -using DistributedFactorGraphs -using DataStructures: OrderedDict -using Dates - -## - -Base.@kwdef struct HardType - name::String - time::DateTime = now(UTC) - val::Float64 = 0.0 -end -# # slight human overhead for each type to ignore extraneous field construction -# # TODO, devnote drop this requirement with filter of _names in transcodeType -# HardType(; -# name::String, -# time::DateTime = now(UTC), -# val::Float64 = 0.0, -# ignorekws... 
-# ) = HardType(name,time,val) - -@testset "Test transcoding of Intermediate, Dict, OrderedDict to a HardType" begin - struct IntermediateType - _version::Any - _type::Any - name::Any - time::Any - val::Any - end - - # somehow one gets an intermediate type - imt = IntermediateType(v"1.0", "NotUsedYet", "test", now(UTC), 1.0) - # or dict (testing string keys) - imd = Dict( - "_version" => v"1.0", - "_type" => "NotUsedYet", - "name" => "test", - "time" => now(UTC), - "val" => 1.0, - ) - # ordered dict (testing symbol keys) - iod = OrderedDict( - :_version => v"1.0", - :_type => "NotUsedYet", - :name => "test", - :time => now(UTC), - # :val => 1.0 - ) - - # do the transcoding to a slighly different hard type - T1 = DistributedFactorGraphs.transcodeType(HardType, imt) - T2 = DistributedFactorGraphs.transcodeType(HardType, imd) - T3 = DistributedFactorGraphs.transcodeType(HardType, iod) -end - -Base.@kwdef struct MyType{T <: Real} - tags::Vector{Symbol} = Symbol[] - count::Int - funfun::Complex{T} = 1 + 5im - somedata::Dict{Symbol, Any} = Dict{Symbol, Any}() - data::Vector{Float64} = zeros(0) - binary::Vector{UInt8} = Vector{UInt8}() -end - -@testset "More super unmarshaling tests of various test dicts" begin - d = Dict("count" => 3) - DistributedFactorGraphs.transcodeType(MyType, d) - - d2 = Dict("count" => 3, "tags" => Any["hi", "okay"]) - DistributedFactorGraphs.transcodeType(MyType, d2) - - d3 = Dict("count" => "3", "tags" => String["hi", "okay"]) - DistributedFactorGraphs.transcodeType(MyType, d3) - - d4 = Dict("count" => 3.0, "funfun" => "8 - 3im", "tags" => Any["hi", "okay"]) - DistributedFactorGraphs.transcodeType(MyType{Float32}, d4) - - d5 = Dict( - "count" => 3, - "somedata" => Dict{String, Any}( - "calibration" => [1.1; 2.2], - "description" => "this is a test", - ), - ) - DistributedFactorGraphs.transcodeType(MyType{Float64}, d5) - - d6 = Dict("count" => 3.0, "data" => Any[10, 60]) - DistributedFactorGraphs.transcodeType(MyType, d6) - - d7 = Dict("count" => 3.0, "data" => String["10", "60"]) - DistributedFactorGraphs.transcodeType(MyType, d7) - - d8 = Dict("count" => 4, "binary" => take!(IOBuffer("hello world"))) - DistributedFactorGraphs.transcodeType(MyType, d8) - - d9 = Dict("count" => 4, "somedata" => Dict{Symbol, Any}(:test => "no ambiguity")) - DistributedFactorGraphs.transcodeType(MyType, d9) -end diff --git a/src/DataBlobs/entities/BlobStores.jl b/src/DataBlobs/entities/BlobStores.jl index 5821a9a3..5fce2617 100644 --- a/src/DataBlobs/entities/BlobStores.jl +++ b/src/DataBlobs/entities/BlobStores.jl @@ -30,3 +30,8 @@ Design goal: all `Blobstore`s with the same `label` can contain the same `blobid """ abstract type AbstractBlobstore{T} end const Blobstore = AbstractBlobstore + +function StructUtils.lower(::StructUtils.StructStyle, store::AbstractBlobstore) + return StructUtils.lower(Packed(store)) +end +@choosetype AbstractBlobstore resolvePackedType diff --git a/src/DataBlobs/services/BlobStores.jl b/src/DataBlobs/services/BlobStores.jl index 0915f91f..6e75a1b3 100644 --- a/src/DataBlobs/services/BlobStores.jl +++ b/src/DataBlobs/services/BlobStores.jl @@ -129,6 +129,8 @@ struct FolderStore{T} <: AbstractBlobstore{T} folder::String end +FolderStore(label::Symbol, folder::String) = FolderStore{Vector{UInt8}}(label, folder) + function FolderStore(foldername::String; label::Symbol = :default, createfolder = true) storepath = joinpath(foldername, string(label)) if createfolder && !isdir(storepath) diff --git a/src/DistributedFactorGraphs.jl b/src/DistributedFactorGraphs.jl 
index cd9162fb..2604fda3 100644 --- a/src/DistributedFactorGraphs.jl +++ b/src/DistributedFactorGraphs.jl @@ -249,13 +249,14 @@ export GraphsDFG # export listVariableMetadata, listFactorMetadata, listAgentMetadata, listGraphMetadata # export listVariableBlobentryMetadata, listFactorBlobentryMetadata, listAgentBlobentryMetadata, listGraphBlobentryMetadata +export deleteVariables! +export deleteFactors! ##============================================================================== ## Common Accessors ##============================================================================== export getLabel -# might only be public -export getId +public getId ##============================================================================== ## Internal or not yet ready @@ -285,7 +286,7 @@ export mergeAgentTags! public listTags public mergeTags! public emptyTags! -public deleteTags! #TODO do we want this one +public deleteTags! ##------------------------------------------------------------------------------ ## Bloblets @@ -574,9 +575,11 @@ include("DataBlobs/services/BlobStores.jl") include("DataBlobs/services/BlobPacking.jl") include("DataBlobs/services/BlobWrappers.jl") +#FIXME function getSolvable end function getVariableType end function isInitialized end +function listTags end # In Memory Types include("GraphsDFG/GraphsDFG.jl") using .GraphsDFGs diff --git a/src/FileDFG/services/FileDFG.jl b/src/FileDFG/services/FileDFG.jl index 9b465811..6f7bc9e3 100644 --- a/src/FileDFG/services/FileDFG.jl +++ b/src/FileDFG/services/FileDFG.jl @@ -16,7 +16,7 @@ v1 = addVariable!(dfg, :a, ContinuousScalar, tags = [:POSE], solvable=0) saveDFG(dfg, "/tmp/saveDFG.tar.gz") ``` """ -function saveDFG(folder::AbstractString, dfg::AbstractDFG; saveMetadata::Bool = true) +function saveDFG(folder::AbstractString, dfg::AbstractDFG) # TODO: Deprecate the folder functionality @@ -47,13 +47,18 @@ function saveDFG(folder::AbstractString, dfg::AbstractDFG; saveMetadata::Bool = @showprogress "saving factors" for f in factors JSON.json("$factorFolder/$(f.label).json", f; style = DFGJSONStyle()) end - #GraphsDFG metadata - if saveMetadata - @assert isa(dfg, GraphsDFG) "only metadata for GraphsDFG are supported" - @info "saving dfg metadata" - fgPacked = GraphsDFGs.packDFGMetadata(dfg) - JSON.json("$savepath/dfg.json", fgPacked; style = DFGJSONStyle()) - end + + #GraphsDFG nodes + @assert isa(dfg, GraphsDFG) "only saving a GraphsDFG is supported" + p = Progress(4, "Saving DFG Nodes") + JSON.json("$savepath/graphroot.json", dfg.graph; style = DFGJSONStyle()) + next!(p) + JSON.json("$savepath/agent.json", dfg.agent; style = DFGJSONStyle()) + next!(p) + JSON.json("$savepath/solverparams.json", dfg.solverParams; style = DFGJSONStyle()) + next!(p) + JSON.json("$savepath/blobstores.json", dfg.blobStores; style = DFGJSONStyle()) + next!(p) savedir = dirname(savepath) # is this a path of just local name?
#344 -- workaround with unique names savename = basename(string(savepath)) @@ -93,108 +98,51 @@ ls(dfg) See also: [`loadDFG`](@ref), [`saveDFG`](@ref) """ function loadDFG!( - dfgLoadInto::AbstractDFG, - dst::AbstractString; - overwriteDFGMetadata::Bool = true, -) - # - # loaddir gets deleted so needs to be unique - loaddir = split(joinpath("/", "tmp", "caesar", "random", string(uuid1())), '-')[1] - # Check if zipped destination (dst) by first doing fuzzy search from user supplied dst - folder = dst # working directory for fileDFG variable and factor operations - dstname = dst # path name could either be legacy FileDFG dir or .tar.gz file of FileDFG files. - unzip = false + dfgLoadInto::AbstractDFG{V, F}, + file::AbstractString; +) where {V <: AbstractGraphVariable, F <: AbstractGraphFactor} # add if doesn't have .tar.gz extension - lastdirname = splitpath(dstname)[end] - if !isdir(dst) - unzip = true - sdst = split(lastdirname, '.') - if sdst[end] != "gz" # length(sdst) == 1 && - dstname *= ".tar.gz" - lastdirname *= ".tar.gz" - end + if !contains(basename(file), ".tar.gz") + file *= ".tar.gz" end # check the file actually exists - @assert isfile(dstname) "cannot find file $dstname" - # TODO -- what if it is not a tar.gz but classic folder instead? - # do actual unzipping - filename = lastdirname[1:(end - length(".tar.gz"))] |> string - if unzip - Base.mkpath(loaddir) - folder = joinpath(loaddir, filename) - @debug "loadDFG! detected a gzip $dstname -- unpacking via $loaddir now..." - Base.rm(folder; recursive = true, force = true) - # unzip the tar file - tar_gz = open(dstname) - tar = CodecZlib.GzipDecompressorStream(tar_gz) - Tar.extract(tar, folder) - close(tar) - #or for non-compressed - # Tar.extract(dstname, folder) - end + @assert isfile(file) "cannot find file $file" - #GraphsDFG metadata - if overwriteDFGMetadata - @assert isa(dfgLoadInto, GraphsDFG) "Only GraphsDFG metadata are supported" - @info "loading dfg metadata" - jstr = read("$folder/dfg.json", String) - fgPacked = JSON.parse(jstr, GraphsDFGs.PackedGraphsDFG; style = DFGJSONStyle()) - GraphsDFGs.unpackDFGMetadata!(dfgLoadInto, fgPacked) - end + # only extract the json files needed for the variables and factors + tar_gz = open(file) + tar = CodecZlib.GzipDecompressorStream(tar_gz) + dfgnodenames = r"^(factors|variables)" + loaddir = Tar.extract(hdr -> contains(hdr.path, dfgnodenames), tar) + close(tar) # extract the factor graph from fileDFG folder - factors = FactorDFG[] - varFolder = "$folder/variables" - factorFolder = "$folder/factors" - # Folder preparations - !isdir(folder) && error("Can't load DFG graph - folder '$folder' doesn't exist") - !isdir(varFolder) && error("Can't load DFG graph - folder '$varFolder' doesn't exist") - !isdir(factorFolder) && - error("Can't load DFG graph - folder '$factorFolder' doesn't exist") - - # varFiles = sort(readdir(varFolder; sort = false); lt = natural_lt) - # factorFiles = sort(readdir(factorFolder; sort = false); lt = natural_lt) - varFiles = readdir(varFolder; sort = false) - factorFiles = readdir(factorFolder; sort = false) - - # FIXME, why is this treated different from VariableSkeleton, VariableSummary? 
+ variablefiles = readdir(joinpath(loaddir, "variables"); sort = false, join = true) - usePackedVariable = - isa(dfgLoadInto, GraphsDFG) && getTypeDFGVariables(dfgLoadInto) == VariableDFG # type instability on `variables` as either `::Vector{Variable}` or `::Vector{VariableCompute{<:}}` (vector of abstract) - variables = @showprogress 1 "loading variables" asyncmap(varFiles) do varFile - jstr = read("$varFolder/$varFile", String) - packedvar = JSON.parse(jstr, VariableDFG; style = DFGJSONStyle()) - v = usePackedVariable ? packedvar : unpackVariable(packedvar) + variables = @showprogress 1 "loading variables" asyncmap(variablefiles) do file + v = JSON.parsefile(file, V; style = DFGJSONStyle()) return addVariable!(dfgLoadInto, v) end - @info "Loaded $(length(variables)) variables"#- $(map(v->v.label, variables))" + @debug "Loaded $(length(variables)) variables" - usePackedFactor = - isa(dfgLoadInto, GraphsDFG) && getTypeDFGFactors(dfgLoadInto) == FactorDFG + factorfiles = readdir(joinpath(loaddir, "factors"); sort = false, join = true) - # `factors` is not type stable `::Vector{Factor}` or `::Vector{FactorCompute{<:}}` (vector of abstract) - factors = @showprogress 1 "loading factors" asyncmap(factorFiles) do factorFile - f = JSON.parsefile("$factorFolder/$factorFile", FactorDFG; style = DFGJSONStyle()) + factors = @showprogress 1 "loading factors" asyncmap(factorfiles) do file + f = JSON.parsefile(file, F; style = DFGJSONStyle()) return addFactor!(dfgLoadInto, f) end - @info "Loaded $(length(factors)) factors"# - $(map(f->f.label, factors))" + @debug "Loaded $(length(factors)) factors" - if isa(dfgLoadInto, GraphsDFG) && getTypeDFGFactors(dfgLoadInto) != FactorDFG + if isa(dfgLoadInto, GraphsDFG) && getTypeDFGFactors(dfgLoadInto) <: FactorDFG # Finally, rebuild the CCW's for the factors to completely reinflate them @showprogress 1 "Rebuilding factor solver cache" for factor in factors rebuildFactorCache!(dfgLoadInto, factor) end end - # remove the temporary unzipped file - if unzip - @info "DFG.loadDFG! is deleting a temp folder created during unzip, $loaddir" - # need this because the number of files created in /tmp/caesar/random is becoming redonkulous. - Base.rm(loaddir; recursive = true, force = true) - end + Base.rm(loaddir; recursive = true, force = true) return dfgLoadInto end @@ -215,35 +163,40 @@ function loadDFG(file::AbstractString) # check the file actually exists @assert isfile(file) "cannot find file $file" - # only extract dfg.json to rebuild DFG object + # only extract the json files needed to rebuild DFG object tar_gz = open(file) tar = CodecZlib.GzipDecompressorStream(tar_gz) - loaddir = Tar.extract(hdr -> contains(hdr.path, "dfg.json"), tar) + dfgnodenames = r"^(agent\.json|blobstores\.json|graphroot\.json|solverparams\.json)$" + loaddir = Tar.extract(hdr -> contains(hdr.path, dfgnodenames), tar) close(tar) - #Only GraphsDFG metadata supported - jstr = read("$loaddir/dfg.json", String) - # --------------------------------- - #TODO deprecate old format, v0.28 - local fgPacked - try - fgPacked = JSON.parse(jstr, GraphsDFGs.PackedGraphsDFG; style = DFGJSONStyle()) - catch e - if e isa MethodError - @warn "Deprecated serialization: Failed to read DFG metadata. Attempting to load using the old format. 
Error:" e - fgPacked = GraphsDFGs.PackedGraphsDFG( - JSON.parse(jstr, GraphsDFGs._OldPackedGraphsDFG; style = DFGJSONStyle()), - ) - else - rethrow(e) - end - end - # ---------------------------------- - dfg = GraphsDFGs.unpackDFGMetadata(fgPacked) + progess = Progress(4, "Loading DFG Nodes") + agent = JSON.parsefile(joinpath(loaddir, "agent.json"), Agent; style = DFGJSONStyle()) + next!(progess) + graph = JSON.parsefile( + joinpath(loaddir, "graphroot.json"), + Graphroot; + style = DFGJSONStyle(), + ) + next!(progess) + solverParams = JSON.parsefile( + joinpath(loaddir, "solverparams.json"), + AbstractDFGParams; + style = DFGJSONStyle(), + ) + next!(progess) + blobStores = JSON.parsefile( + joinpath(loaddir, "blobstores.json"), + Dict{Symbol, AbstractBlobstore}; + style = DFGJSONStyle(), + ) + next!(progess) + + dfg = GraphsDFG(; agent, graph, solverParams, blobStores) @debug "DFG.loadDFG is deleting a temp folder created during unzip, $loaddir" # cleanup temporary folder Base.rm(loaddir; recursive = true, force = true) - return loadDFG!(dfg, file; overwriteDFGMetadata = false) + return loadDFG!(dfg, file) end diff --git a/src/GraphsDFG/GraphsDFG.jl b/src/GraphsDFG/GraphsDFG.jl index 00c33339..6c9090b1 100644 --- a/src/GraphsDFG/GraphsDFG.jl +++ b/src/GraphsDFG/GraphsDFG.jl @@ -63,9 +63,7 @@ import ...DistributedFactorGraphs: addGraphBlobentry!, addGraphBlobentries!, listGraphBlobentries, - listAgentBlobentries, - getTypeDFGVariables, - getTypeDFGFactors + listAgentBlobentries include("FactorGraphs/FactorGraphs.jl") using .FactorGraphs @@ -74,7 +72,6 @@ using .FactorGraphs # Imports include("entities/GraphsDFG.jl") include("services/GraphsDFG.jl") -include("services/GraphsDFGSerialization.jl") # Exports export GraphsDFG diff --git a/src/GraphsDFG/services/GraphsDFGSerialization.jl b/src/GraphsDFG/services/GraphsDFGSerialization.jl deleted file mode 100644 index f3bc7943..00000000 --- a/src/GraphsDFG/services/GraphsDFGSerialization.jl +++ /dev/null @@ -1,112 +0,0 @@ -using InteractiveUtils - -@kwdef struct PackedGraphsDFG{T <: AbstractDFGParams} - # addHistory::Vector{Symbol} - solverParams::T - solverParams_type::String = string(nameof(typeof(solverParams))) - typePackedVariable::Bool = false # Are variables packed or full - typePackedFactor::Bool = false # Are factors packed or full - blobStores::Union{Nothing, Dict{Symbol, FolderStore{Vector{UInt8}}}} #FIXME allow more types of blobstores - graph::Graphroot - agent::Agent -end - -function getPackedGraphsDFGSubtype(s) - subs = subtypes(AbstractDFGParams) - idx = findfirst(x -> nameof(x) == Symbol(s.solverParams_type[]), subs) - isnothing(idx) && throw( - DFG.SerializationError("Unknown solver parameters type `$(s.solverParams_type[])`"), - ) - return PackedGraphsDFG{subs[idx]} -end -JSON.@choosetype PackedGraphsDFG getPackedGraphsDFGSubtype - -function getTypeDFGVariables( - fg::GraphsDFG{<:AbstractDFGParams, T, <:AbstractGraphFactor}, -) where {T} - return T -end -function getTypeDFGFactors( - fg::GraphsDFG{<:AbstractDFGParams, <:AbstractGraphVariable, T}, -) where {T} - return T -end - -## -""" - $(SIGNATURES) -Packing function to serialize DFG metadata from. 
-""" -function packDFGMetadata(fg::GraphsDFG) - commonfields = intersect(fieldnames(PackedGraphsDFG), fieldnames(GraphsDFG)) - - setdiff!(commonfields, [:blobStores]) - blobStores = Dict{Symbol, FolderStore{Vector{UInt8}}}() - foreach(values(fg.blobStores)) do store - if store isa FolderStore{Vector{UInt8}} - blobStores[getLabel(store)] = store - else - @warn "Blobstore $(getLabel(store)) of type $(typeof(store)) is not supported yet and will not be saved" - end - end - - props = (k => getproperty(fg, k) for k in commonfields) - return PackedGraphsDFG(; - typePackedVariable = getTypeDFGVariables(fg) == VariableDFG, - typePackedFactor = getTypeDFGFactors(fg) == FactorDFG, - blobStores, - props..., - ) -end - -function unpackDFGMetadata(packed::PackedGraphsDFG) - commonfields = intersect(fieldnames(GraphsDFG), fieldnames(PackedGraphsDFG)) - - setdiff!(commonfields, [:blobStores]) - blobStores = packed.blobStores - - #TODO add 'CanSerialize' trait to blobstores and also serialize NvaBlobStores - _isfolderstorepath(s) = false - _isfolderstorepath(s::FolderStore) = ispath(s.folder) - # FIXME escalate to keyword - for (ks, bs) in blobStores - if !_isfolderstorepath(bs) - delete!(blobStores, ks) - @warn("Unable to load blobstore, $ks from $(bs.folder)") - end - end - - props = (k => getproperty(packed, k) for k in commonfields) - - VT = if isnothing(packed.typePackedVariable) || !packed.typePackedVariable - VariableCompute - else - VariableDFG - end - FT = if isnothing(packed.typePackedFactor) || !packed.typePackedFactor - FactorCompute - else - FactorDFG - end - # VT = isnothing(packed.typePackedVariable) || packed.typePackedVariable ? Variable : VariableCompute - # FT = isnothing(packed.typePackedFactor) || packed.typePackedFactor ? FactorDFG : FactorCompute - - props = filter!(collect(props)) do (k, v) - return !isnothing(v) - end - - return GraphsDFG{typeof(packed.solverParams), VT, FT}(; blobStores, props...) -end - -function unpackDFGMetadata!(dfg::GraphsDFG, packed::PackedGraphsDFG) - commonfields = intersect(fieldnames(GraphsDFG), fieldnames(PackedGraphsDFG)) - - setdiff!(commonfields, [:blobStores]) - !isnothing(packed.blobStores) && merge!(dfg.blobStores, packed.blobStores) - - props = (k => getproperty(packed, k) for k in commonfields) - foreach(props) do (k, v) - return setproperty!(dfg, k, v) - end - return dfg -end diff --git a/src/entities/AbstractDFG.jl b/src/entities/AbstractDFG.jl index 55047c93..cabacf6a 100644 --- a/src/entities/AbstractDFG.jl +++ b/src/entities/AbstractDFG.jl @@ -49,3 +49,8 @@ $(TYPEDEF) Empty structure for solver parameters. """ struct NoSolverParams <: AbstractDFGParams end + +function StructUtils.lower(::StructUtils.StructStyle, p::AbstractDFGParams) + return StructUtils.lower(Packed(p)) +end +@choosetype AbstractDFGParams resolvePackedType diff --git a/src/entities/DFGFactor.jl b/src/entities/DFGFactor.jl index b63aee72..90d7aa85 100644 --- a/src/entities/DFGFactor.jl +++ b/src/entities/DFGFactor.jl @@ -213,7 +213,7 @@ Read-only summary factor structure for a DistributedFactorGraph factor. Fields: $(TYPEDFIELDS) """ -Base.@kwdef struct FactorSummary <: AbstractGraphFactor +@tags struct FactorSummary <: AbstractGraphFactor """Factor label, e.g. :x1f1. Accessor: [`getLabel`](@ref)""" label::Symbol @@ -222,7 +222,7 @@ Base.@kwdef struct FactorSummary <: AbstractGraphFactor tags::Set{Symbol} """Ordered list of the neighbor variables. 
Accessors: [`getVariableOrder`](@ref)""" - variableorder::Tuple{Vararg{Symbol}} #TODO changed to NTuple + variableorder::Tuple{Vararg{Symbol}} & (choosetype = x->NTuple{length(x), Symbol},) #TODO changed to NTuple """Variable timestamp. Accessors: [`getTimestamp`](@ref)""" timestamp::TimeDateZone @@ -249,7 +249,7 @@ Skeleton factor structure for a DistributedFactorGraph factor. Fields: $(TYPEDFIELDS) """ -Base.@kwdef struct FactorSkeleton <: AbstractGraphFactor +@tags struct FactorSkeleton <: AbstractGraphFactor """Factor label, e.g. :x1f1. Accessor: [`getLabel`](@ref)""" label::Symbol """Factor tags, e.g. [:FACTOR]. Accessors: [`getTags`](@ref), [`mergeTags!`](@ref), and [`removeTags!`](@ref)""" tags::Set{Symbol} """Ordered list of the neighbor variables. Accessors: [`getVariableOrder`](@ref)""" - variableorder::Tuple{Vararg{Symbol}} + variableorder::Tuple{Vararg{Symbol}} & (choosetype = x->NTuple{length(x), Symbol},) end ##------------------------------------------------------------------------------ diff --git a/src/services/AbstractDFG.jl b/src/services/AbstractDFG.jl index e9e2a641..257a2929 100644 --- a/src/services/AbstractDFG.jl +++ b/src/services/AbstractDFG.jl @@ -55,7 +55,7 @@ Method must be overloaded by the user for Serialization to work. """ function rebuildFactorCache!(dfg::AbstractDFG, factor::AbstractGraphFactor, neighbors = []) @warn( - "FactorCache not build, rebuildFactorCache! is not implemented for $(typeof(dfg)). Make sure to load IncrementalInference.", + "FactorCache not built, rebuildFactorCache! is not implemented for $(typeof(dfg)). `rebuildFactorCache!` is available in IncrementalInference.", maxlog = 1 ) return nothing @@ -65,13 +65,13 @@ end $(SIGNATURES) Function to get the type of the variables in the DFG. """ -function getTypeDFGVariables end +getTypeDFGVariables(::AbstractDFG{V, F}) where {V, F} = V """ $(SIGNATURES) Function to get the type of the factors in the DFG. """ -function getTypeDFGFactors end +getTypeDFGFactors(::AbstractDFG{V, F}) where {V, F} = F ##------------------------------------------------------------------------------ ## Setters @@ -371,6 +371,18 @@ function deleteVariable!(dfg::AbstractDFG, variable::AbstractGraphVariable) return deleteVariable!(dfg, variable.label) end +function deleteVariables!(dfg::AbstractDFG, labels::Vector{Symbol}) + counts = asyncmap(labels) do l + return deleteVariable!(dfg, l) + end + return sum(counts) +end + +function deleteVariables!(dfg::AbstractDFG; kwargs...) + labels = listVariables(dfg; kwargs...) + return deleteVariables!(dfg, labels) +end + """ $(SIGNATURES) Delete the referenced Factor from the DFG. """ function deleteFactor!(dfg::AbstractDFG, factor::AbstractGraphFactor) return deleteFactor!(dfg, factor.label) end +function deleteFactors!(dfg::AbstractDFG, labels::Vector{Symbol}) + counts = asyncmap(labels) do l + return deleteFactor!(dfg, l) + end + return sum(counts) +end + +function deleteFactors!(dfg::AbstractDFG; kwargs...) + labels = listFactors(dfg; kwargs...) + return deleteFactors!(dfg, labels) +end + # rather use isa in code, but ok, here it is isVariable(dfg::AbstractDFG, node::AbstractGraphVariable) = true isFactor(dfg::AbstractDFG, node::AbstractGraphFactor) = true diff --git a/test/testBlocks.jl b/test/testBlocks.jl index 2aa478ed..9f0ed9b8 100644 --- a/test/testBlocks.jl +++ b/test/testBlocks.jl @@ -1598,36 +1598,35 @@ function FileDFGTestBlock(testDFGAPI; kwargs...) # kwargs = () # filename = "/tmp/fileDFG" dfg, verts, facs = connectivityTestGraph(testDFGAPI; kwargs...)
+ v4 = getVariable(dfg, :x4) + vnd = getState(v4, :default) + # set everything + # vnd.BayesNetVertID = :outid + # push!(vnd.BayesNetOutVertIDs, :id) + # vnd.bw[1] = [1.0;] + # vnd.dontmargin = true + # vnd.eliminated = true + vnd.observability .= Float64[1.5;] + vnd.initialized = true + vnd.marginalized = true + push!(vnd.separator, :sep) + vnd.solves = 2 + # vnd.val[1] = [2.0;] + #update + mergeVariable!(dfg, v4) + + f45 = getFactor(dfg, :x4x5f1) + fsd = getFactorState(f45) + # set some factor solver data + push!(fsd.certainhypo, 2) + fsd.eliminated = true + push!(fsd.multihypo, 4.0) + fsd.nullhypo = 5.0 + fsd.potentialused = true + #update factor + mergeFactor!(dfg, f45) for filename in ["/tmp/fileDFG", "/tmp/FileDFGExtension.tar.gz"] - v4 = getVariable(dfg, :x4) - vnd = getState(v4, :default) - # set everything - # vnd.BayesNetVertID = :outid - # push!(vnd.BayesNetOutVertIDs, :id) - # vnd.bw[1] = [1.0;] - # vnd.dontmargin = true - # vnd.eliminated = true - vnd.observability .= Float64[1.5;] - vnd.initialized = true - vnd.marginalized = true - push!(vnd.separator, :sep) - vnd.solves = 2 - # vnd.val[1] = [2.0;] - #update - mergeVariable!(dfg, v4) - - f45 = getFactor(dfg, :x4x5f1) - fsd = getFactorState(f45) - # set some factor solver data - push!(fsd.certainhypo, 2) - fsd.eliminated = true - push!(fsd.multihypo, 4.0) - fsd.nullhypo = 5.0 - fsd.potentialused = true - #update factor - mergeFactor!(dfg, f45) - # Save and load the graph to test. saveDFG(dfg, filename) @@ -1647,9 +1646,38 @@ function FileDFGTestBlock(testDFGAPI; kwargs...) @test getFactor(dfg, fact) == getFactor(retDFG, fact) end + dfg2 = loadDFG(filename) + + @test issetequal(ls(dfg), ls(dfg2)) + @test issetequal(lsf(dfg), lsf(dfg2)) + for var in ls(dfg) + @test getVariable(dfg, var) == getVariable(dfg2, var) + end + for fact in lsf(dfg) + @test getFactor(dfg, fact) == getFactor(dfg2, fact) + end + #TODO test graph, agent, blob stores, solverdata. + # @test length(getBlobentries(getVariable(retDFG, :x1))) == 1 # @test typeof(getBlobentry(getVariable(retDFG, :x1),:testing)) == GeneralDataEntry # @test length(getBlobentries(getVariable(retDFG, :x2))) == 1 # @test typeof(getBlobentry(getVariable(retDFG, :x2),:testing2)) == FileDataEntry end + + filename = "/tmp/fileDFG" + summarydfg = testDFGAPI{NoSolverParams, VariableSummary, FactorSummary}(; + graphLabel = :testGraph, + ) + loadDFG!(summarydfg, filename) + + @test issetequal(ls(dfg), ls(summarydfg)) + @test issetequal(lsf(dfg), lsf(summarydfg)) + + skeletondfg = testDFGAPI{NoSolverParams, VariableSkeleton, FactorSkeleton}(; + graphLabel = :testGraph, + ) + loadDFG!(skeletondfg, filename) + + @test issetequal(ls(dfg), ls(skeletondfg)) + @test issetequal(lsf(dfg), lsf(skeletondfg)) end
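# ---------------------------------------------------------------------------
# Editor's sketch (not part of the patch above): intended end-to-end usage of
# the node-based save/load format and the bulk-delete helpers introduced by
# this change. Assumes `dfg` is an already populated GraphsDFG (variables and
# factors added, e.g. via IncrementalInference); "/tmp/roundtrip" is a
# hypothetical path, and the ".tar.gz" extension is appended automatically.
using DistributedFactorGraphs
saveDFG(dfg, "/tmp/roundtrip")    # writes variables/, factors/, graphroot.json,
                                  # agent.json, solverparams.json, blobstores.json
dfg2 = loadDFG("/tmp/roundtrip")  # rebuilds agent/graph/params/stores, then loads nodes
deleteFactors!(dfg2, lsf(dfg2))   # bulk delete by label; the kwargs method lists labels first
deleteVariables!(dfg2, ls(dfg2))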