From 47a37dd0367e3c8aa3f5ac42c2df97c56fa4187f Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 20 Jul 2025 18:24:56 +0800 Subject: [PATCH 1/3] upgrade-OMEinsum, new-fileio --- Makefile | 8 ++------ Project.toml | 2 +- docs/src/performancetips.md | 9 ++++----- docs/src/ref.md | 8 ++++++++ src/GenericTensorNetworks.jl | 6 +++++- src/fileio.jl | 34 ++++++++++++++++++++++++++++++++++ src/networks.jl | 23 +++++++++++++++-------- test/fileio.jl | 27 +++++++++++++++++++++++++++ test/interfaces.jl | 4 ++-- 9 files changed, 98 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index d589423..cd4006c 100644 --- a/Makefile +++ b/Makefile @@ -3,14 +3,10 @@ JL = julia --project default: init test init: - $(JL) -e 'using Pkg; Pkg.precompile()' -init-docs: - $(JL) -e 'using Pkg; Pkg.activate("docs"); Pkg.develop(path="."), Pkg.precompile()' + $(JL) -e 'using Pkg; Pkg.precompile(); Pkg.activate("docs"); Pkg.develop(path=".")' update: - $(JL) -e 'using Pkg; Pkg.update(); Pkg.precompile()' -update-docs: - $(JL) -e 'using Pkg; Pkg.activate("docs"); Pkg.update(); Pkg.precompile()' + $(JL) -e 'using Pkg; Pkg.update(); Pkg.activate("docs"); Pkg.update()' test: $(JL) -e 'using Pkg; Pkg.test("GenericTensorNetworks")' diff --git a/Project.toml b/Project.toml index 07d23c5..451cdba 100644 --- a/Project.toml +++ b/Project.toml @@ -38,7 +38,7 @@ FFTW = "1.4" Graphs = "1.7" LinearAlgebra = "1" LuxorGraphPlot = "0.5" -OMEinsum = "0.8" +OMEinsum = "0.9.1" Polynomials = "4" Primes = "0.5" ProblemReductions = "0.3" diff --git a/docs/src/performancetips.md b/docs/src/performancetips.md index 5c9d0c7..03a42e4 100644 --- a/docs/src/performancetips.md +++ b/docs/src/performancetips.md @@ -20,7 +20,7 @@ using GenericTensorNetworks, Graphs, Random graph = random_regular_graph(120, 3) iset = IndependentSet(graph) problem = GenericTensorNetwork(iset; optimizer=TreeSA( - sc_target=20, sc_weight=1.0, rw_weight=3.0, ntrials=10, βs=0.01:0.1:15.0, niters=20)) + score=ScoreFunction(sc_target=20, sc_weight=1.0, rw_weight=3.0), ntrials=10, βs=0.01:0.1:15.0, niters=20)) ``` The `GenericTensorNetwork` constructor maps a problem to a tensor network with an optimized contraction order. The `optimizer` parameter specifies the algorithm to use: @@ -75,8 +75,7 @@ The finite field approach requires only 298 KB, while using the `Polynomial` typ ## 2. Slicing Technique for Large Problems For large-scale applications, you can slice over certain degrees of freedom to reduce space complexity. This approach loops and accumulates over selected degrees of freedom, resulting in smaller tensor networks inside the loop. - -In the `TreeSA` optimizer, set `nslices` to a value greater than zero: +This can be achieved by setting the `slicer` parameter of the `GenericTensorNetwork` constructor. ```julia # Without slicing @@ -84,11 +83,11 @@ problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials contraction_complexity(problem) # With slicing over 5 degrees of freedom -problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials=10, niters=10, nslices=5)) +problem = GenericTensorNetwork(iset; optimizer=TreeSA(βs=0.01:0.1:25.0, ntrials=10, niters=10), slicer=TreeSASlicer(score=ScoreFunction(sc_target=10))) contraction_complexity(problem) ``` -In this example, slicing over 5 degrees of freedom reduces space complexity by a factor of 32 (2^5), while increasing computation time by less than a factor of 2. 
+In this example, the `TreeSASlicer` slices the tensor network until its space complexity reaches 2^10, at the cost of increased time complexity.
 
 ## 3. Accelerating Tropical Number Operations
 
diff --git a/docs/src/ref.md b/docs/src/ref.md
index 18d1c82..dd84fa2 100644
--- a/docs/src/ref.md
+++ b/docs/src/ref.md
@@ -144,6 +144,14 @@ SABipartite
 KaHyParBipartite
 MergeVectors
 MergeGreedy
+TreeSASlicer
+ScoreFunction
+```
+
+## FileIO
+```@docs
+save_tensor_network
+load_tensor_network
 ```
 
 ## Others
diff --git a/src/GenericTensorNetworks.jl b/src/GenericTensorNetworks.jl
index 856e802..0b0f6e4 100644
--- a/src/GenericTensorNetworks.jl
+++ b/src/GenericTensorNetworks.jl
@@ -4,6 +4,7 @@ using Core: Argument
 using TropicalNumbers
 using OMEinsum
 using OMEinsum: contraction_complexity, timespace_complexity, timespacereadwrite_complexity, getixsv, NestedEinsum, getixs, getiy, DynamicEinCode
+using OMEinsum.OMEinsumContractionOrders.JSON
 using Graphs, Random
 using DelimitedFiles, Serialization
 using LuxorGraphPlot
@@ -26,7 +27,7 @@ import StatsBase
 
 # OMEinsum
 export timespace_complexity, timespacereadwrite_complexity, contraction_complexity, @ein_str, getixsv, getiyv
-export GreedyMethod, TreeSA, SABipartite, KaHyParBipartite, MergeVectors, MergeGreedy
+export GreedyMethod, TreeSA, SABipartite, KaHyParBipartite, MergeVectors, MergeGreedy, TreeSASlicer, ScoreFunction
 
 # estimate memory
 export estimate_memory
@@ -80,6 +81,9 @@ export read_size, read_count, read_config, read_size_count, read_size_config
 export show_graph, show_configs, show_einsum, GraphDisplayConfig, render_locs, show_landscape
 export AbstractLayout, SpringLayout, StressLayout, SpectralLayout, Layered, LayeredSpringLayout, LayeredStressLayout
 
+# FileIO
+export save_tensor_network, load_tensor_network
+
 project_relative_path(xs...) 
= normpath(joinpath(dirname(dirname(pathof(@__MODULE__))), xs...))
 
 # Mods.jl fixed to v1.3.4
diff --git a/src/fileio.jl b/src/fileio.jl
index 4859f95..e8de8f2 100644
--- a/src/fileio.jl
+++ b/src/fileio.jl
@@ -123,3 +123,37 @@ function dict_deserialize_tree(id::UInt, d::Dict)
     end
 end
 
+function save_tensor_network(tn::GenericTensorNetwork; folder::String)
+    !isdir(folder) && mkpath(folder)
+
+    OMEinsum.writejson(joinpath(folder, "code.json"), tn.code)
+
+    open(joinpath(folder, "fixedvertices.json"), "w") do io
+        JSON.print(io, tn.fixedvertices, 2)
+    end
+
+    ProblemReductions.writejson(joinpath(folder, "problem.json"), tn.problem)
+    return nothing
+end
+
+function load_tensor_network(folder::String)
+    !isdir(folder) && throw(SystemError("Folder not found: $folder"))
+
+    code_path = joinpath(folder, "code.json")
+    fixed_path = joinpath(folder, "fixedvertices.json")
+    problem_path = joinpath(folder, "problem.json")
+
+    !isfile(code_path) && throw(SystemError("Code file not found: $code_path"))
+    !isfile(fixed_path) && throw(SystemError("Fixedvertices file not found: $fixed_path"))
+    !isfile(problem_path) && throw(SystemError("Problem file not found: $problem_path"))
+
+    code = OMEinsum.readjson(code_path)
+
+    fixed_dict = JSON.parsefile(fixed_path)
+    fixedvertices = Dict{labeltype(code),Int}(parse(Int, k) => v for (k, v) in fixed_dict)
+
+    problem = ProblemReductions.readjson(problem_path)
+
+    return GenericTensorNetwork(problem, code, fixedvertices)
+end
+
diff --git a/src/networks.jl b/src/networks.jl
index 13a2861..c35e4c4 100644
--- a/src/networks.jl
+++ b/src/networks.jl
@@ -16,29 +16,36 @@ end
 """
 $TYPEDEF
 
-    GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod())
+    GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod(), slicer=nothing)
 
 The generic tensor network that generated from a [`ConstraintSatisfactionProblem`](@ref).
 
 Positional arguments
 -------------------------------
-* `problem` is the graph problem.
-* `code` is the tensor network contraction code.
-* `fixedvertices` is a dictionary specifying the fixed dimensions.
+- `problem` is the constraint satisfaction problem.
+
+Keyword arguments
+-------------------------------
+- `openvertices` is a vector of open indices, which are the degrees of freedom that appear in the output tensor.
+- `fixedvertices` is a dictionary specifying the fixed degrees of freedom. For example, to fix variable `5` to `0`, set `fixedvertices = Dict(5 => 0)`.
+- `optimizer` is the contraction order optimizer for the generated tensor network.
+- `slicer` specifies how to slice the tensor network, which reduces the memory usage at the cost of extra computing time.
+
+For more information about contraction order optimization and slicing, please refer to the [OMEinsumContractionOrders documentation](https://tensorbfs.github.io/OMEinsumContractionOrders.jl/dev/).
""" struct GenericTensorNetwork{CFG, CT, LT} problem::CFG code::CT fixedvertices::Dict{LT,Int} end -function GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod()) +function GenericTensorNetwork(problem::ConstraintSatisfactionProblem; openvertices=(), fixedvertices=Dict(), optimizer=GreedyMethod(), slicer=nothing) rcode = rawcode(problem; openvertices) - code = _optimize_code(rcode, uniformsize_fix(rcode, num_flavors(problem), fixedvertices), optimizer, MergeVectors()) + code = _optimize_code(rcode, uniformsize_fix(rcode, num_flavors(problem), fixedvertices), optimizer, MergeVectors(), slicer) return GenericTensorNetwork(problem, code, Dict{labeltype(code),Int}(fixedvertices)) end # a unified interface to optimize the contraction code -_optimize_code(code, size_dict, optimizer::Nothing, simplifier) = code -_optimize_code(code, size_dict, optimizer, simplifier) = optimize_code(code, size_dict, optimizer, simplifier) +_optimize_code(code, size_dict, optimizer::Nothing, simplifier, slicer) = code +_optimize_code(code, size_dict, optimizer, simplifier, slicer) = optimize_code(code, size_dict, optimizer; simplifier, slicer) function Base.show(io::IO, tn::GenericTensorNetwork) println(io, "$(typeof(tn))") diff --git a/test/fileio.jl b/test/fileio.jl index 8cc63d0..cfa63da 100644 --- a/test/fileio.jl +++ b/test/fileio.jl @@ -51,3 +51,30 @@ end @test ma == tree end +@testset "save load GenericTensorNetwork" begin + g = smallgraph(:petersen) + problem = IndependentSet(g, UnitWeight(10)) + tn = GenericTensorNetwork(problem; fixedvertices=Dict(1=>0, 2=>1)) + folder = tempname() + GenericTensorNetworks.save_tensor_network(tn; folder=folder) + tn2 = GenericTensorNetworks.load_tensor_network(folder) + @test tn.problem == tn2.problem + @test tn.code == tn2.code + @test tn.fixedvertices == tn2.fixedvertices + @test solve(tn, SizeMax()) == solve(tn2, SizeMax()) + + # test with empty fixedvertices + tn3 = GenericTensorNetwork(problem) + folder2 = tempname() + GenericTensorNetworks.save_tensor_network(tn3; folder=folder2) + tn4 = GenericTensorNetworks.load_tensor_network(folder2) + @test tn3.problem == tn4.problem + @test tn3.code == tn4.code + @test tn3.fixedvertices == tn4.fixedvertices + + # test error cases + empty_folder = tempname() + mkpath(empty_folder) + @test_throws SystemError GenericTensorNetworks.load_tensor_network(empty_folder) +end + diff --git a/test/interfaces.jl b/test/interfaces.jl index a138cee..06ac19c 100644 --- a/test/interfaces.jl +++ b/test/interfaces.jl @@ -86,7 +86,7 @@ end @testset "slicing" begin g = Graphs.smallgraph("petersen") - gp = GenericTensorNetwork(IndependentSet(g), optimizer=TreeSA(nslices=5, ntrials=1)) + gp = GenericTensorNetwork(IndependentSet(g), optimizer=TreeSA(ntrials=1), slicer=TreeSASlicer(score=ScoreFunction(sc_target=2))) res1 = solve(gp, SizeMax())[] res2 = solve(gp, CountingAll())[] res3 = solve(gp, CountingMax(Single))[] @@ -278,4 +278,4 @@ end graph = UnitDiskGraph(fullerene(), sqrt(5)) spin_glass = SpinGlass(graph, UnitWeight(ne(graph)), zeros(Int, nv(graph))) @test log(solve(spin_glass, PartitionFunction(1.0))[])/nv(graph) ≈ 1.3073684577607942 -end \ No newline at end of file +end From 5b563c9d43ba5a1f62181d28ee155648db2bec77 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Sun, 20 Jul 2025 19:25:47 +0800 Subject: [PATCH 2/3] update docstrings --- src/fileio.jl | 32 ++++++++++++++++++++++++++++++++ test/fileio.jl | 10 +++++----- 2 files changed, 37 insertions(+), 5 deletions(-) diff 
--git a/src/fileio.jl b/src/fileio.jl index e8de8f2..9aa8f72 100644 --- a/src/fileio.jl +++ b/src/fileio.jl @@ -123,6 +123,23 @@ function dict_deserialize_tree(id::UInt, d::Dict) end end +""" + save_tensor_network(tn::GenericTensorNetwork; folder::String) + +Serialize a tensor network to disk for storage/reloading. Creates three structured files: +- `code.json`: OMEinsum contraction code (tree structure and contraction order) +- `fixedvertices.json`: JSON-serialized Dict of pinned vertex configurations +- `problem.json`: Problem specification using ProblemReductions serialization + +The target folder will be created recursively if it doesn't exist. Files are overwritten +if they already exist. Uses JSON for human-readable serialization with type preservation. + +The saved files can be loaded using [`load_tensor_network`](@ref). + +# Arguments +- `tn::GenericTensorNetwork`: a [`GenericTensorNetwork`](@ref) instance to serialize. Must contain valid code, problem, and fixedvertices fields. +- `folder::String`: Destination directory path. Parent directories will be created as needed. +""" function save_tensor_network(tn::GenericTensorNetwork; folder::String) !isdir(folder) && mkpath(folder) @@ -136,6 +153,21 @@ function save_tensor_network(tn::GenericTensorNetwork; folder::String) return nothing end +""" + load_tensor_network(folder::String) -> GenericTensorNetwork + +Load a tensor network from disk that was previously saved using [`save_tensor_network`](@ref). +Reconstructs the network from three required files: contraction code, fixed vertices mapping, and problem specification. + +# Arguments +- `folder::String`: Path to directory containing saved network files. Must contain: + - `code.json`: Contraction order/structure from OMEinsum + - `fixedvertices.json`: Dictionary of pinned vertex states + - `problem.json`: Problem specification and parameters + +# Returns +- `GenericTensorNetwork`: Reconstructed tensor network. 
+""" function load_tensor_network(folder::String) !isdir(folder) && throw(SystemError("Folder not found: $folder")) diff --git a/test/fileio.jl b/test/fileio.jl index cfa63da..f54806f 100644 --- a/test/fileio.jl +++ b/test/fileio.jl @@ -56,8 +56,8 @@ end problem = IndependentSet(g, UnitWeight(10)) tn = GenericTensorNetwork(problem; fixedvertices=Dict(1=>0, 2=>1)) folder = tempname() - GenericTensorNetworks.save_tensor_network(tn; folder=folder) - tn2 = GenericTensorNetworks.load_tensor_network(folder) + save_tensor_network(tn; folder=folder) + tn2 = load_tensor_network(folder) @test tn.problem == tn2.problem @test tn.code == tn2.code @test tn.fixedvertices == tn2.fixedvertices @@ -66,8 +66,8 @@ end # test with empty fixedvertices tn3 = GenericTensorNetwork(problem) folder2 = tempname() - GenericTensorNetworks.save_tensor_network(tn3; folder=folder2) - tn4 = GenericTensorNetworks.load_tensor_network(folder2) + save_tensor_network(tn3; folder=folder2) + tn4 = load_tensor_network(folder2) @test tn3.problem == tn4.problem @test tn3.code == tn4.code @test tn3.fixedvertices == tn4.fixedvertices @@ -75,6 +75,6 @@ end # test error cases empty_folder = tempname() mkpath(empty_folder) - @test_throws SystemError GenericTensorNetworks.load_tensor_network(empty_folder) + @test_throws SystemError load_tensor_network(empty_folder) end From 80c1663a95a9efdedb0000cb1f9a0e33a1e65721 Mon Sep 17 00:00:00 2001 From: GiggleLiu Date: Mon, 28 Jul 2025 09:46:46 +0800 Subject: [PATCH 3/3] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 451cdba..1d24212 100644 --- a/Project.toml +++ b/Project.toml @@ -1,7 +1,7 @@ name = "GenericTensorNetworks" uuid = "3521c873-ad32-4bb4-b63d-f4f178f42b49" authors = ["GiggleLiu and contributors"] -version = "4.0.1" +version = "4.1.0" [deps] AbstractTrees = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"