From c81a2117e780b1d6299f97bf3610d851b0c1ef08 Mon Sep 17 00:00:00 2001 From: leburgel Date: Mon, 31 Oct 2022 11:38:12 -0700 Subject: [PATCH 01/13] Sweeping algorithms for tree tensor networks: draft --- Manifest.toml | 191 +++--- Project.toml | 3 + README.md | 8 +- src/ITensorNetworks.jl | 16 +- src/abstractindsnetwork.jl | 2 + src/abstractitensornetwork.jl | 267 +++++++- src/exports.jl | 33 +- src/graphs/abstractgraph.jl | 30 + src/graphs/namedgraphs.jl | 49 ++ src/imports.jl | 37 +- src/itensornetwork.jl | 58 +- src/treetensornetwork/abstractprojttno.jl | 141 +++++ .../abstracttreetensornetwork.jl | 399 ++++++++++++ src/treetensornetwork/opsum_to_ttno.jl | 589 ++++++++++++++++++ src/treetensornetwork/projttno.jl | 52 ++ src/treetensornetwork/projttnosum.jl | 61 ++ src/treetensornetwork/treetensornetwork.jl | 86 --- src/treetensornetwork/ttno.jl | 148 +++++ src/treetensornetwork/ttns.jl | 128 ++++ src/utility.jl | 17 + test/Manifest.toml | 385 ++++++++---- test/Project.toml | 1 + test/test_abstractgraph.jl | 65 ++ test/test_itensornetwork.jl | 102 +++ test/test_itensornetwork_basics.jl | 66 -- test/test_namedgraphs.jl | 144 +++++ test/test_opsum_to_ttno.jl | 73 +++ test/test_ttno.jl | 51 ++ test/test_ttns.jl | 49 ++ test/utils.jl | 81 +++ 30 files changed, 2913 insertions(+), 419 deletions(-) create mode 100644 src/graphs/abstractgraph.jl create mode 100644 src/graphs/namedgraphs.jl create mode 100644 src/treetensornetwork/abstractprojttno.jl create mode 100644 src/treetensornetwork/abstracttreetensornetwork.jl create mode 100644 src/treetensornetwork/opsum_to_ttno.jl create mode 100644 src/treetensornetwork/projttno.jl create mode 100644 src/treetensornetwork/projttnosum.jl delete mode 100644 src/treetensornetwork/treetensornetwork.jl create mode 100644 src/treetensornetwork/ttno.jl create mode 100644 src/treetensornetwork/ttns.jl create mode 100644 src/utility.jl create mode 100644 test/test_abstractgraph.jl create mode 100644 test/test_itensornetwork.jl delete mode 100644 test/test_itensornetwork_basics.jl create mode 100644 test/test_namedgraphs.jl create mode 100644 test/test_opsum_to_ttno.jl create mode 100644 test/test_ttno.jl create mode 100644 test/test_ttns.jl create mode 100644 test/utils.jl diff --git a/Manifest.toml b/Manifest.toml index f43b7fa0..4fa2f97c 100644 --- a/Manifest.toml +++ b/Manifest.toml @@ -1,16 +1,29 @@ # This file is machine-generated - editing it directly is not advised -julia_version = "1.7.3" +julia_version = "1.8.0-rc3" manifest_format = "2.0" +project_hash = "e7f73cdcdb9ff2f97d14f4dfcb5b7962b23b754e" + +[[deps.AbstractTrees]] +git-tree-sha1 = "52b3b436f8f73133d7bc3a6c71ee7ed6ab2ab754" +uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" +version = "0.4.3" + +[[deps.Adapt]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "195c5505521008abea5aee4f96930717958eac6f" +uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" +version = "3.4.0" [[deps.ArgTools]] uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.1" [[deps.ArnoldiMethod]] deps = ["LinearAlgebra", "Random", "StaticArrays"] -git-tree-sha1 = "f87e559f87a45bece9c9ed97458d3afe98b1ebb9" +git-tree-sha1 = "62e51b39331de8911e4a7ff6f5aaf38a5f4cc0ae" uuid = "ec485272-7323-5ecc-a04f-4719b315124d" -version = "0.1.0" +version = "0.2.0" [[deps.Artifacts]] uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" @@ -18,37 +31,32 @@ uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" [[deps.Base64]] uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" -[[deps.Blosc]] -deps = ["Blosc_jll"] -git-tree-sha1 = 
"310b77648d38c223d947ff3f50f511d08690b8d5" -uuid = "a74b3585-a348-5f62-a45c-50e91977d574" -version = "0.7.3" - -[[deps.Blosc_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Lz4_jll", "Pkg", "Zlib_jll", "Zstd_jll"] -git-tree-sha1 = "91d6baa911283650df649d0aea7c28639273ae7b" -uuid = "0b7ba130-8d10-5ba8-a3d6-c5182647fed9" -version = "1.21.1+0" +[[deps.BitIntegers]] +deps = ["Random"] +git-tree-sha1 = "5a814467bda636f3dde5c4ef83c30dd0a19928e0" +uuid = "c3b6d118-76ef-56ca-8cc7-ebb389d030a1" +version = "0.2.6" [[deps.ChainRulesCore]] deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "9950387274246d08af38f6eef8cb5480862a435f" +git-tree-sha1 = "e7ff6cadf743c098e08fca25c91103ee4303c9bb" uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.14.0" +version = "1.15.6" [[deps.Compat]] deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = "96b0bc6c52df76506efc8a441c6cf1adcb1babc4" +git-tree-sha1 = "78bee250c6826e1cf805a88b7f1e86025275d208" uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "3.42.0" +version = "3.46.0" [[deps.CompilerSupportLibraries_jll]] deps = ["Artifacts", "Libdl"] uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "0.5.2+0" [[deps.DataGraphs]] -deps = ["Dictionaries", "Graphs"] -git-tree-sha1 = "5133dc420cea8b8d6da471f8b2d1b74f3db9e3e8" +deps = ["Dictionaries", "Graphs", "MultiDimDictionaries", "NamedGraphs", "SimpleTraits"] +git-tree-sha1 = "1387bc98ca6879f52fece985623dd841a9fbd512" repo-rev = "main" repo-url = "https://github.com/mtfishman/DataGraphs.jl" uuid = "b5a273c3-7e6c-41f6-98bd-8d7f1525a36a" @@ -56,9 +64,9 @@ version = "0.0.1" [[deps.DataStructures]] deps = ["Compat", "InteractiveUtils", "OrderedCollections"] -git-tree-sha1 = "3daef5523dd2e769dad2365274f760ff5f282c7d" +git-tree-sha1 = "d1fff3a548102f48987a52a2e0d114fa97d730f0" uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.18.11" +version = "0.18.13" [[deps.Dates]] deps = ["Printf"] @@ -69,10 +77,10 @@ deps = ["Mmap"] uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" [[deps.Dictionaries]] -deps = ["Indexing", "Random"] -git-tree-sha1 = "7e73a524c6c282e341de2b046e481abedbabd073" +deps = ["Indexing", "Random", "Serialization"] +git-tree-sha1 = "e82c3c97b5b4ec111f3c1b55228cebc7510525a2" uuid = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" -version = "0.3.19" +version = "0.3.25" [[deps.Distributed]] deps = ["Random", "Serialization", "Sockets"] @@ -81,6 +89,7 @@ uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[deps.Downloads]] deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" [[deps.ExprTools]] git-tree-sha1 = "56559bbef6ca5ea0c0818fa5c90320398a6fbf8d" @@ -90,29 +99,35 @@ version = "0.1.8" [[deps.FileWatching]] uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" +[[deps.Functors]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "a2657dd0f3e8a61dbe70fc7c122038bd33790af5" +uuid = "d9f16b24-f501-4c13-a1f2-28368ffc5196" +version = "0.3.0" + [[deps.Graphs]] deps = ["ArnoldiMethod", "Compat", "DataStructures", "Distributed", "Inflate", "LinearAlgebra", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics"] -git-tree-sha1 = "57c021de207e234108a6f1454003120a1bf350c4" +git-tree-sha1 = "ba2d094a88b6b287bd25cfa86f301e7693ffae2f" uuid = 
"86223c79-3864-5bf0-83f7-82e725a168b6" -version = "1.6.0" +version = "1.7.4" [[deps.HDF5]] -deps = ["Blosc", "Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] -git-tree-sha1 = "698c099c6613d7b7f151832868728f426abe698b" +deps = ["Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] +git-tree-sha1 = "19effd6b5af759c8aaeb9c77f89422d3f975ab65" uuid = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" -version = "0.15.7" +version = "0.16.12" [[deps.HDF5_jll]] deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenSSL_jll", "Pkg", "Zlib_jll"] -git-tree-sha1 = "bab67c0d1c4662d2c4be8c6007751b0b6111de5c" +git-tree-sha1 = "4cc2bb72df6ff40b055295fdef6d92955f9dede8" uuid = "0234f1f7-429e-5d53-9886-15a909be8d59" -version = "1.12.1+0" +version = "1.12.2+2" [[deps.ITensors]] -deps = ["ChainRulesCore", "Compat", "Dictionaries", "HDF5", "IsApprox", "KrylovKit", "LinearAlgebra", "LinearMaps", "NDTensors", "PackageCompiler", "Pkg", "Printf", "Random", "Requires", "SerializedElementArrays", "StaticArrays", "Strided", "TimerOutputs", "TupleTools", "Zeros", "ZygoteRules"] -git-tree-sha1 = "7e3819c79a85a6152e8e0cf08d27dc20e08c739f" +deps = ["Adapt", "BitIntegers", "ChainRulesCore", "Compat", "Dictionaries", "Functors", "HDF5", "IsApprox", "KrylovKit", "LinearAlgebra", "LinearMaps", "NDTensors", "PackageCompiler", "Pkg", "Printf", "Random", "Requires", "SerializedElementArrays", "SimpleTraits", "StaticArrays", "Strided", "TimerOutputs", "TupleTools", "Zeros", "ZygoteRules"] +git-tree-sha1 = "abf22bd129d73c0e46eebd82af1865e66cbbe437" uuid = "9136182c-28ba-11e9-034c-db9fb085ebd5" -version = "0.3.0" +version = "0.3.20" [[deps.Indexing]] git-tree-sha1 = "ce1566720fd6b19ff3411404d4b977acd4814f9f" @@ -120,9 +135,9 @@ uuid = "313cdc1a-70c2-5d6a-ae34-0150d3930a38" version = "1.1.1" [[deps.Inflate]] -git-tree-sha1 = "f5fc07d4e706b84f72d54eedcc1c13d92fb0871c" +git-tree-sha1 = "5cd07aab533df5170988219191dfad0519391428" uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9" -version = "0.1.2" +version = "0.1.3" [[deps.InteractiveUtils]] deps = ["Markdown"] @@ -130,9 +145,9 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.IsApprox]] deps = ["LinearAlgebra"] -git-tree-sha1 = "fcf3bcf04bea6483b9d0aa95cef3963ffb4281be" +git-tree-sha1 = "7627fa9b4c822a2b3ab8d8d39639e0a889a9758e" uuid = "28f27b66-4bd8-47e7-9110-e2746eb8bed7" -version = "0.1.4" +version = "0.1.5" [[deps.JLLWrappers]] deps = ["Preferences"] @@ -142,9 +157,9 @@ version = "1.4.1" [[deps.KrylovKit]] deps = ["LinearAlgebra", "Printf"] -git-tree-sha1 = "0328ad9966ae29ccefb4e1b9bfd8c8867e4360df" +git-tree-sha1 = "49b0c1dd5c292870577b8f58c51072bd558febb9" uuid = "0b1a1467-8014-51b9-945f-bf0ae24f4b77" -version = "0.5.3" +version = "0.5.4" [[deps.LazyArtifacts]] deps = ["Artifacts", "Pkg"] @@ -153,10 +168,12 @@ uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3" [[deps.LibCURL]] deps = ["LibCURL_jll", "MozillaCACerts_jll"] uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.3" [[deps.LibCURL_jll]] deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "7.83.1+1" [[deps.LibGit2]] deps = ["Base64", "NetworkOptions", "Printf", "SHA"] @@ -165,6 +182,7 @@ uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" [[deps.LibSSH2_jll]] deps = ["Artifacts", "Libdl", "MbedTLS_jll"] uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.10.2+0" [[deps.Libdl]] uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" @@ -175,24 +193,18 @@ uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" 
[[deps.LinearMaps]] deps = ["LinearAlgebra", "SparseArrays", "Statistics"] -git-tree-sha1 = "e99b76cded02965cba0ed9103cc249efa158a0f2" +git-tree-sha1 = "d1b46faefb7c2f48fdec69e6f3cc34857769bc15" uuid = "7a12625a-238d-50fd-b39a-03d52299707e" -version = "3.6.0" +version = "3.8.0" [[deps.Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" -[[deps.Lz4_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "5d494bc6e85c4c9b626ee0cab05daa4085486ab1" -uuid = "5ced341a-0733-55b8-9ab6-a4889d929147" -version = "1.9.3+0" - [[deps.MacroTools]] deps = ["Markdown", "Random"] -git-tree-sha1 = "3d3e902b31198a27340d0bf00d6ac452866021cf" +git-tree-sha1 = "42324d08725e200c23d4dfb549e0d5d89dede2d2" uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.9" +version = "0.5.10" [[deps.Markdown]] deps = ["Base64"] @@ -201,30 +213,32 @@ uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" [[deps.MbedTLS_jll]] deps = ["Artifacts", "Libdl"] uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.0+0" [[deps.Mmap]] uuid = "a63ad114-7e13-5084-954f-fe012c677804" [[deps.MozillaCACerts_jll]] uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2022.2.1" [[deps.MultiDimDictionaries]] deps = ["Dictionaries", "Reexport"] -git-tree-sha1 = "263a189a9d69d05db1b9cc0b73868b61d67c3c40" +git-tree-sha1 = "d005a770e691721ce6c2a687ff189d68b5a388ea" repo-rev = "main" repo-url = "https://github.com/mtfishman/MultiDimDictionaries.jl" uuid = "87ff4268-a46e-478f-b30a-76b83dd64e3c" version = "0.0.1" [[deps.NDTensors]] -deps = ["Compat", "Dictionaries", "HDF5", "LinearAlgebra", "Random", "Requires", "StaticArrays", "Strided", "TimerOutputs", "TupleTools"] -git-tree-sha1 = "4fd50190d68f7ee24cf7ea6fd3c5ddbf6351caf8" +deps = ["Adapt", "Compat", "Dictionaries", "Functors", "HDF5", "LinearAlgebra", "Random", "Requires", "SimpleTraits", "StaticArrays", "Strided", "TimerOutputs", "TupleTools"] +git-tree-sha1 = "c04da1fe76f68452be95a1a340b5b806bf7da13f" uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" -version = "0.1.37" +version = "0.1.44" [[deps.NamedGraphs]] -deps = ["Dictionaries", "Graphs", "MultiDimDictionaries"] -git-tree-sha1 = "330999ac30b9bd520e5ef3c67240d2f218208c29" +deps = ["AbstractTrees", "Dictionaries", "Graphs", "MultiDimDictionaries"] +git-tree-sha1 = "e6cf8ecc8adbae51b2554768b7df9750007e8684" repo-rev = "main" repo-url = "https://github.com/mtfishman/NamedGraphs.jl" uuid = "678767b0-92e7-4007-89e4-4527a8725b19" @@ -232,16 +246,18 @@ version = "0.0.1" [[deps.NetworkOptions]] uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" [[deps.OpenBLAS_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" +version = "0.3.20+0" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "ab05aa4cc89736e95915b01e7279e61b1bfe33b8" +git-tree-sha1 = "e60321e3f2616584ff98f0a4f18d98ae6f89bbb3" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "1.1.14+0" +version = "1.1.17+0" [[deps.OrderedCollections]] git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" @@ -249,20 +265,21 @@ uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" version = "1.4.1" [[deps.PackageCompiler]] -deps = ["Artifacts", "LazyArtifacts", "Libdl", "Pkg", "Printf", "RelocatableFolders", "UUIDs"] -git-tree-sha1 = "4ad92047603f8e955503f92767577b32508c39af" +deps = ["Artifacts", "LazyArtifacts", "Libdl", "Pkg", "Printf", "RelocatableFolders", "TOML", "UUIDs"] +git-tree-sha1 = "f31ea705915b4e16e8104727ebd99e0c3993478d" uuid = 
"9b87118b-4619-50d2-8e1e-99f35a4d4d9d" -version = "2.0.5" +version = "2.1.0" [[deps.Pkg]] deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.8.0" [[deps.Preferences]] deps = ["TOML"] -git-tree-sha1 = "d3538e7f8a790dc8903519090857ef8e1283eecd" +git-tree-sha1 = "47e5f437cc0e7ef2ce8406ce1e7e24d44915f88d" uuid = "21216c6a-2e73-6563-6e65-726566657250" -version = "1.2.5" +version = "1.3.0" [[deps.Printf]] deps = ["Unicode"] @@ -283,9 +300,9 @@ version = "1.2.2" [[deps.RelocatableFolders]] deps = ["SHA", "Scratch"] -git-tree-sha1 = "cdbd3b1338c72ce29d9584fdbe9e9b70eeb5adca" +git-tree-sha1 = "90bc7a7c96410424509e4263e277e43250c05691" uuid = "05181044-ff0b-4ac5-8273-598c1e38db00" -version = "0.1.3" +version = "1.0.0" [[deps.Requires]] deps = ["UUIDs"] @@ -295,12 +312,13 @@ version = "1.3.0" [[deps.SHA]] uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" [[deps.Scratch]] deps = ["Dates"] -git-tree-sha1 = "0b4b7f1393cff97c33891da2a0bf69c6ed241fda" +git-tree-sha1 = "f94f779c94e58bf9ea243e77a37e16d9de9126bd" uuid = "6c6a2e73-6563-6170-7368-637461726353" -version = "1.1.0" +version = "1.1.1" [[deps.Serialization]] uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" @@ -324,15 +342,26 @@ version = "0.9.4" [[deps.Sockets]] uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +[[deps.SparseArrayKit]] +deps = ["LinearAlgebra", "Requires", "TupleTools"] +git-tree-sha1 = "2b2ad655abf78d9a7c34b76e2e7b83b67421f848" +uuid = "a9a3c162-d163-4c15-8926-b8794fbefed2" +version = "0.2.1" + [[deps.SparseArrays]] deps = ["LinearAlgebra", "Random"] uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" [[deps.StaticArrays]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "4f6ec5d99a28e1a749559ef7dd518663c5eca3d5" +deps = ["LinearAlgebra", "Random", "StaticArraysCore", "Statistics"] +git-tree-sha1 = "f86b3a049e5d05227b10e15dbb315c5b90f14988" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.4.3" +version = "1.5.9" + +[[deps.StaticArraysCore]] +git-tree-sha1 = "6b7ba252635a5eff6a0b0664a41ee140a1c9e72a" +uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" +version = "1.4.0" [[deps.Statistics]] deps = ["LinearAlgebra", "SparseArrays"] @@ -340,9 +369,9 @@ uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [[deps.Strided]] deps = ["LinearAlgebra", "TupleTools"] -git-tree-sha1 = "4d581938087ca90eab9bd4bb6d270edaefd70dcd" +git-tree-sha1 = "a7a664c91104329c88222aa20264e1a05b6ad138" uuid = "5e0ebb24-38b0-5f93-81fe-25c709ecae67" -version = "1.1.2" +version = "1.2.3" [[deps.Suppressor]] git-tree-sha1 = "c6ed566db2fe3931292865b966d6d140b7ef32a9" @@ -352,10 +381,12 @@ version = "0.2.1" [[deps.TOML]] deps = ["Dates"] uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.0" [[deps.Tar]] deps = ["ArgTools", "SHA"] uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" [[deps.Test]] deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] @@ -363,9 +394,9 @@ uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [[deps.TimerOutputs]] deps = ["ExprTools", "Printf"] -git-tree-sha1 = "d60b0c96a16aaa42138d5d38ad386df672cb8bd8" +git-tree-sha1 = "9dfcb767e17b0849d6aaf85997c98a5aea292513" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.16" +version = "0.5.21" [[deps.TupleTools]] git-tree-sha1 = "3c712976c47707ff893cf6ba4354aa14db1d8938" @@ -388,12 +419,7 @@ version = "0.3.0" [[deps.Zlib_jll]] deps = ["Libdl"] uuid = 
"83775a58-1f1d-513f-b197-d71354ab007a" - -[[deps.Zstd_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "e45044cd873ded54b6a5bac0eb5c971392cf1927" -uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" -version = "1.5.2+0" +version = "1.2.12+3" [[deps.ZygoteRules]] deps = ["MacroTools"] @@ -404,11 +430,14 @@ version = "0.2.2" [[deps.libblastrampoline_jll]] deps = ["Artifacts", "Libdl", "OpenBLAS_jll"] uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" +version = "5.1.1+0" [[deps.nghttp2_jll]] deps = ["Artifacts", "Libdl"] uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.47.0+0" [[deps.p7zip_jll]] deps = ["Artifacts", "Libdl"] uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+0" diff --git a/Project.toml b/Project.toml index b5d621da..1a53da93 100644 --- a/Project.toml +++ b/Project.toml @@ -8,10 +8,13 @@ DataGraphs = "b5a273c3-7e6c-41f6-98bd-8d7f1525a36a" Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5" +IsApprox = "28f27b66-4bd8-47e7-9110-e2746eb8bed7" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MultiDimDictionaries = "87ff4268-a46e-478f-b30a-76b83dd64e3c" NamedGraphs = "678767b0-92e7-4007-89e4-4527a8725b19" Requires = "ae029012-a4dd-5104-9daa-d747884805df" +SparseArrayKit = "a9a3c162-d163-4c15-8926-b8794fbefed2" +StaticArrays = "90137ffa-7385-5640-81b9-e52037218182" Suppressor = "fd094767-a336-5f1f-9728-57cf17d0bbfb" [compat] diff --git a/README.md b/README.md index 82c4ae4a..34ac2628 100644 --- a/README.md +++ b/README.md @@ -2,11 +2,13 @@ A package to provide general network data structures and tools to use with ITensors.jl. - +This particular branch contains a draft of the extensions needed to implement sweeping +algorithms for tree tensor networks. ## Installation -This package relies on a few unregistered packages. To install, you will need to do: +This package relies on a few unregistered packages. 
To install this version, you will need +to do: ```julia julia> using Pkg @@ -17,7 +19,7 @@ julia> Pkg.add(url="https://github.com/mtfishman/NamedGraphs.jl") julia> Pkg.add(url="https://github.com/mtfishman/DataGraphs.jl") -julia> Pkg.add(url="https://github.com/mtfishman/ITensorNetworks.jl") +julia> Pkg.add(url="https://github.com/leburgel/ITensorNetworks.jl#tree_sweeping") ``` diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 32589940..94ad8abf 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -3,15 +3,18 @@ module ITensorNetworks using DataGraphs using Dictionaries using Graphs +using IsApprox using ITensors using ITensors.ITensorVisualizationCore using MultiDimDictionaries using NamedGraphs using Requires +using SparseArrayKit +using StaticArrays using Suppressor # TODO: export from ITensors -using ITensors: commontags +using ITensors: commontags, OneITensor using MultiDimDictionaries: IndexType, SliceIndex using NamedGraphs: @@ -112,7 +115,16 @@ include("indsnetwork.jl") include("sitetype.jl") include("abstractitensornetwork.jl") include("itensornetwork.jl") -include(joinpath("treetensornetwork", "treetensornetwork.jl")) +include(joinpath("treetensornetwork", "abstracttreetensornetwork.jl")) +include(joinpath("treetensornetwork", "ttns.jl")) +include(joinpath("treetensornetwork", "ttno.jl")) +include(joinpath("treetensornetwork", "opsum_to_ttno.jl")) +include(joinpath("treetensornetwork", "abstractprojttno.jl")) +include(joinpath("treetensornetwork", "projttno.jl")) +include(joinpath("treetensornetwork", "projttnosum.jl")) +include(joinpath("graphs", "abstractgraph.jl")) +include(joinpath("graphs", "namedgraphs.jl")) +include("utility.jl") include("exports.jl") diff --git a/src/abstractindsnetwork.jl b/src/abstractindsnetwork.jl index 74be4acc..33b4be3a 100644 --- a/src/abstractindsnetwork.jl +++ b/src/abstractindsnetwork.jl @@ -11,6 +11,8 @@ is_directed(::Type{<:AbstractIndsNetwork}) = false vertex_data(graph::AbstractIndsNetwork, args...) = vertex_data(data_graph(graph), args...) edge_data(graph::AbstractIndsNetwork, args...) = edge_data(data_graph(graph), args...) +to_vertex(tn::AbstractIndsNetwork, args...) = to_vertex(underlying_graph(tn), args...) + # # Index access # diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index e3b1fa72..8591f82f 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -40,6 +40,15 @@ isassigned(tn::AbstractITensorNetwork, index...) 
= isassigned(data_graph(tn), in # Iteration # +# TODO: discuss if this is the desired behavior +Base.eachindex(tn::AbstractITensorNetwork) = vertices(tn) +Base.iterate(tn::AbstractITensorNetwork) = iterate(vertex_data(tn)) +Base.iterate(tn::AbstractITensorNetwork, state) = iterate(vertex_data(tn), state) + +# TODO: different `map` functionalities as defined for ITensors.AbstractMPS + +# TODO: broadcasting + # # Data modification # @@ -74,6 +83,36 @@ end # Convenience wrapper itensors(tn::AbstractITensorNetwork) = Vector{ITensor}(tn) +# +# Promotion and conversion +# + +function LinearAlgebra.promote_leaf_eltypes(tn::AbstractITensorNetwork) + return LinearAlgebra.promote_leaf_eltypes(itensors(tn)) +end + +function ITensors.promote_itensor_eltype(tn::AbstractITensorNetwork) + return LinearAlgebra.promote_leaf_eltypes(tn) +end + +ITensors.scalartype(tn::AbstractITensorNetwork) = LinearAlgebra.promote_leaf_eltypes(tn) + +# TODO: eltype(::AbstractITensorNetwork) (cannot behave the same as eltype(::ITensors.AbstractMPS)) + +# TODO: mimic ITensors.AbstractMPS implementation using map +function ITensors.convert_leaf_eltype(eltype::Type, tn::AbstractITensorNetwork) + tn = copy(tn) + vertex_data(tn) .= ITensors.convert_leaf_eltype.(Ref(eltype), vertex_data(tn)) + return tn +end + +# TODO: mimic ITensors.AbstractMPS implementation using map +function NDTensors.convert_scalartype(eltype::Type{<:Number}, tn::AbstractITensorNetwork) + tn = copy(tn) + vertex_data(tn) .= ITensors.adapt.(Ref(eltype), vertex_data(tn)) + return tn +end + # # Conversion to Graphs # @@ -142,6 +181,14 @@ function siteinds(tn::AbstractITensorNetwork, vertex...) return uniqueinds(tn, vertex...) end +function siteinds(::typeof(all), tn::AbstractITensorNetwork, vertex...) + return siteinds(tn, vertex...) +end + +function siteinds(::typeof(only), tn::AbstractITensorNetwork, vertex...) + return only(siteinds(tn, vertex...)) +end + function commoninds(tn::AbstractITensorNetwork, edge) e = NamedDimEdge(edge) return commoninds(tn[src(e)], tn[dst(e)]) @@ -151,15 +198,25 @@ function linkinds(tn::AbstractITensorNetwork, edge) return commoninds(tn, edge) end +function linkinds(::typeof(all), tn::AbstractITensorNetwork, edge) + return linkinds(tn, edge) +end + +function linkinds(::typeof(only), tn::AbstractITensorNetwork, edge) + return only(linkinds(tn, edge)) +end + # Priming and tagging (changing Index identifiers) function replaceinds(tn::AbstractITensorNetwork, is_is′::Pair{<:IndsNetwork,<:IndsNetwork}) tn = copy(tn) is, is′ = is_is′ - # TODO: Check that `is` and `is′` have the same vertices and edges. + @assert underlying_graph(is) == underlying_graph(is′) for v in vertices(is) + isassigned(is, v) || continue setindex_preserve_graph!(tn, replaceinds(tn[v], is[v] => is′[v]), v) end for e in edges(is) + isassigned(is, e) || continue for v in (src(e), dst(e)) setindex_preserve_graph!(tn, replaceinds(tn[v], is[e] => is′[e]), v) end @@ -178,7 +235,7 @@ const map_inds_label_functions = [ :setprime, :noprime, :replaceprime, - :swapprime, + :swapprime, # TODO: fix this one (broken) :addtags, :removetags, :replacetags, @@ -196,6 +253,24 @@ for f in map_inds_label_functions function $f(n::Union{IndsNetwork,AbstractITensorNetwork}, args...; kwargs...) return map_inds($f, n, args...; kwargs...) end + + function $f( + ffilter::typeof(linkinds), + n::Union{IndsNetwork,AbstractITensorNetwork}, + args...; + kwargs..., + ) + return map_inds($f, n, args...; sites=[], kwargs...) 
+ end + + function $f( + ffilter::typeof(siteinds), + n::Union{IndsNetwork,AbstractITensorNetwork}, + args...; + kwargs..., + ) + return map_inds($f, n, args...; links=[], kwargs...) + end end end @@ -209,12 +284,15 @@ function ⊗(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork; kwargs... return ⊔(tn1, tn2; kwargs...) end -# TODO: name `inner_network` to denote it is lazy? -# TODO: should this make sure that internal indices -# don't clash? -function inner(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) - return dag(tn1) ⊗ tn2 -end +# TODO: remove this in favor of `inner_network` defined below? +# seems better if `inner` returns a number for every concrete AbstractITensorNetwork subtype +# +# # TODO: name `inner_network` to denote it is lazy? +# # TODO: should this make sure that internal indices +# # don't clash? +# function inner(tn1::AbstractITensorNetwork, tn2::AbstractITensorNetwork) +# return dag(tn1) ⊗ tn2 +# end # TODO: how to define this lazily? #norm(tn::AbstractITensorNetwork) = sqrt(inner(tn, tn)) @@ -224,31 +302,34 @@ function contract(tn::AbstractITensorNetwork; sequence=vertices(tn), kwargs...) return contract(Vector{ITensor}(tn); sequence=sequence_linear_index, kwargs...) end -function contract(tn::AbstractITensorNetwork, edge::Pair) - return contract(tn, edgetype(tn)(edge)) +function contract!(tn::AbstractITensorNetwork, edge::Pair) + return contract!(tn, edgetype(tn)(edge)) end # Contract the tensors at vertices `src(edge)` and `dst(edge)` # and store the results in the vertex `dst(edge)`, removing # the vertex `src(edge)`. -function contract(tn::AbstractITensorNetwork, edge::AbstractEdge) - tn = copy(tn) +function contract!(tn::AbstractITensorNetwork, edge::AbstractEdge) new_itensor = tn[src(edge)] * tn[dst(edge)] rem_vertex!(tn, src(edge)) tn[dst(edge)] = new_itensor return tn end +function contract(tn::AbstractITensorNetwork, edge) + return contract!(copy(tn), edge) +end + function tags(tn::AbstractITensorNetwork, edge) is = linkinds(tn, edge) return commontags(is) end -function svd(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return svd(tn, edgetype(tn)(edge)) +function svd!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return svd!(tn, edgetype(tn)(edge)) end -function svd( +function svd!( tn::AbstractITensorNetwork, edge::AbstractEdge; U_vertex=src(edge), @@ -258,11 +339,10 @@ function svd( v_tags=tags(tn, edge), kwargs..., ) - tn = copy(tn) left_inds = uniqueinds(tn, edge) U, S, V = svd(tn[src(edge)], left_inds; lefttags=u_tags, right_tags=v_tags, kwargs...) - rem_vertex!(tn, src(edge)) + rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? add_vertex!(tn, U_vertex) tn[U_vertex] = U @@ -275,7 +355,11 @@ function svd( return tn end -function qr( +function svd(tn::AbstractITensorNetwork, edge; kwargs...) + return svd!(copy(tn), edge; kwargs...) +end + +function qr!( tn::AbstractITensorNetwork, edge::AbstractEdge; Q_vertex=src(edge), @@ -283,11 +367,10 @@ function qr( tags=tags(tn, edge), kwargs..., ) - tn = copy(tn) left_inds = uniqueinds(tn, edge) Q, R = factorize(tn[src(edge)], left_inds; tags, kwargs...) - rem_vertex!(tn, src(edge)) + rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? add_vertex!(tn, Q_vertex) tn[Q_vertex] = Q @@ -297,7 +380,11 @@ function qr( return tn end -function factorize( +function qr(tn::AbstractITensorNetwork, edge; kwargs...) + return qr!(copy(tn), edge; kwargs...) 
+end + +function factorize!( tn::AbstractITensorNetwork, edge::AbstractEdge; X_vertex=src(edge), @@ -305,11 +392,10 @@ function factorize( tags=tags(tn, edge), kwargs..., ) - tn = copy(tn) left_inds = uniqueinds(tn, edge) X, Y = factorize(tn[src(edge)], left_inds; tags, kwargs...) - rem_vertex!(tn, src(edge)) + rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? add_vertex!(tn, X_vertex) tn[X_vertex] = X @@ -319,19 +405,56 @@ function factorize( return tn end -# For ambiguity error -function _orthogonalize_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - tn = factorize(tn, edge; kwargs...) - new_vertex = only(neighbors(tn, src(edge)) ∩ neighbors(tn, dst(edge))) - return contract(tn, new_vertex => dst(edge)) +function factorize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + return factorize!(copy(tn), edge; kwargs...) end -function orthogonalize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - return _orthogonalize_edge(tn, edge; kwargs...) +# For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged? +function _orthogonalize_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + # factorize!(tn, edge; kwargs...) + # new_vertex = only(neighbors(tn, src(edge)) ∩ neighbors(tn, dst(edge))) + # contract!(tn, new_vertex => dst(edge)) + # return tn + left_inds = uniqueinds(tn, edge) + ltags = tags(tn, edge) + X, Y = factorize(tn[src(edge)], left_inds; tags=ltags, ortho="left", kwargs...) + tn[src(edge)] = X + tn[dst(edge)] *= Y + return tn +end + +function orthogonalize!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + return _orthogonalize_edge!(tn, edge; kwargs...) +end + +function orthogonalize!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return orthogonalize!(tn, edgetype(tn)(edge); kwargs...) +end + +function orthogonalize(tn::AbstractITensorNetwork, edge; kwargs...) + return orthogonalize!(copy(tn), edge; kwargs...) +end + +# TODO: decide whether to use graph mutating methods when resulting graph is unchanged? +function _truncate_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + left_inds = uniqueinds(tn, edge) + ltags = tags(tn, edge) + U, S, V = svd(tn[src(edge)], left_inds; lefttags=ltags, ortho="left", kwargs...) + tn[src(edge)] = U + tn[dst(edge)] *= (S * V) + return tn end -function orthogonalize(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return orthogonalize(tn, edgetype(tn)(edge); kwargs...) +function truncate!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + return _truncate_edge!(tn, edge; kwargs...) +end + +function truncate!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return truncate!(tn, edgetype(tn)(edge); kwargs...) +end + +function truncate(tn::AbstractITensorNetwork, edge; kwargs...) + return truncate!(copy(tn), edge; kwargs...) end function optimal_contraction_sequence(tn::AbstractITensorNetwork) @@ -406,3 +529,83 @@ function visualize( end return visualize(Vector{ITensor}(tn), args...; vertex_labels, kwargs...) end + +# +# Link dimensions +# + +function maxlinkdim(tn::AbstractITensorNetwork) + md = 1 + for e in edges(tn) + md = max(md, linkdim(tn, e)) + end + return md +end + +function linkdim(tn::AbstractITensorNetwork, edge::Pair) + return linkdim(tn, edgetype(tn)(edge)) +end + +function linkdim(tn::AbstractITensorNetwork, edge::AbstractEdge) + ls = linkinds(tn, edge) + return prod([isnothing(l) ? 
1 : dim(l) for l in ls]) +end + +function linkdims(tn::AbstractITensorNetwork) + return Dictionary(edges(tn), map(e -> linkdim(tn, e), edges(tn))) +end + +# +# Common index checking +# + +function hascommoninds( + ::typeof(siteinds), A::AbstractITensorNetwork, B::AbstractITensorNetwork +) + for v in vertices(A) + !hascommoninds(siteinds(A, v), siteinds(B, v)) && return false + end + return true +end + +function check_hascommoninds( + ::typeof(siteinds), A::AbstractITensorNetwork, B::AbstractITensorNetwork +) + N = nv(A) + if nv(B) ≠ N + throw( + DimensionMismatch( + "$(typeof(A)) and $(typeof(B)) have mismatched number of vertices $N and $(nv(B))." + ), + ) + end + for v in vertices(A) + !hascommoninds(siteinds(A, v), siteinds(B, v)) && error( + "$(typeof(A)) A and $(typeof(B)) B must share site indices. On vertex $v, A has site indices $(siteinds(A, v)) while B has site indices $(siteinds(B, v)).", + ) + end + return nothing +end + +function hassameinds( + ::typeof(siteinds), A::AbstractITensorNetwork, B::AbstractITensorNetwork +) + nv(A) ≠ nv(B) && return false + for v in vertices(A) + !ITensors.hassameinds(siteinds(all, A, v), siteinds(all, B, v)) && return false + end + return true +end + +# +# Site combiners +# + +function site_combiners(tn::AbstractITensorNetwork) + Cs = NamedDimDataGraph{ITensor}(copy(underlying_graph(tn))) + for v in vertices(tn) + s = siteinds(all, tn, v) + Cs[v] = combiner(s; tags=commontags(s)) + end + return Cs +end diff --git a/src/exports.jl b/src/exports.jl index 39bc0743..67036dc5 100644 --- a/src/exports.jl +++ b/src/exports.jl @@ -18,7 +18,11 @@ export grid, dfs_tree, edgetype, is_directed, - rem_vertex! + rem_vertex!, + post_order_dfs_vertices, + edge_path, + vertex_path, + num_neighbors # # NamedGraphs @@ -36,7 +40,8 @@ export NamedDimGraph, is_leaf, incident_edges, comb_tree, - named_comb_tree + named_comb_tree, + rename_vertices # # DataGraphs @@ -55,23 +60,37 @@ export optimal_contraction_sequence # # indsnetwork.jl -export IndsNetwork +export IndsNetwork, merge # itensornetwork.jl export AbstractITensorNetwork, ITensorNetwork, ⊗, itensors, - tensor_product, - TreeTensorNetworkState, - TTNS, + reverse_bfs_edges, data_graph, inner_network, norm_network, - reverse_bfs_edges + default_root_vertex, + ortho_center, + set_ortho_center!, + factorize!, + contract!, + TreeTensorNetworkState, + TTNS, + randomTTNS, + productTTNS, + TreeTensorNetworkOperator, + TTNO, + ProjTTNO, + ProjTTNOSum, + finite_state_machine # lattices.jl export hypercubic_lattice_graph, square_lattice_graph, chain_lattice_graph # partition.jl export partition + +# utility.jl +export relabel_sites diff --git a/src/graphs/abstractgraph.jl b/src/graphs/abstractgraph.jl new file mode 100644 index 00000000..37257398 --- /dev/null +++ b/src/graphs/abstractgraph.jl @@ -0,0 +1,30 @@ +using Graphs.SimpleTraits + +# TODO: remove once this is merged into NamedGraphs.jl + +# paths for tree graphs + +@traitfn function vertex_path(graph::::(!IsDirected), s, t) + dfs_tree_graph = dfs_tree(graph, t...) + return vertex_path(dfs_tree_graph, s, t) +end + +@traitfn function edge_path(graph::::(!IsDirected), s, t) + dfs_tree_graph = dfs_tree(graph, t...) 
+ return edge_path(dfs_tree_graph, s, t) +end + +# assumes the graph is a rooted directed tree with root d +@traitfn function vertex_path(graph::::IsDirected, s, t) + vertices = eltype(graph)[s] + while vertices[end] != t + push!(vertices, parent_vertex(graph, vertices[end]...)) + end + return vertices +end + +@traitfn function edge_path(graph::::IsDirected, s, t) + vertices = vertex_path(graph, s, t) + pop!(vertices) + return [edgetype(graph)(vertex, parent_vertex(graph, vertex...)) for vertex in vertices] +end diff --git a/src/graphs/namedgraphs.jl b/src/graphs/namedgraphs.jl new file mode 100644 index 00000000..b765c165 --- /dev/null +++ b/src/graphs/namedgraphs.jl @@ -0,0 +1,49 @@ +using NamedGraphs: AbstractNamedEdge + +# TODO: remove once this is merged into NamedGraphs.jl + +function Base.:(==)(g1::GT, g2::GT) where {GT<:AbstractNamedGraph} + issetequal(vertices(g1), vertices(g2)) || return false + for v in vertices(g1) + issetequal(inneighbors(g1, v), inneighbors(g2, v)) || return false + issetequal(outneighbors(g1, v), outneighbors(g2, v)) || return false + end + return true +end + +# renaming routines for general named graphs + +function rename_vertices(e::ET, name_map::Dictionary) where {ET<:AbstractNamedEdge} + # strip type parameter to allow renaming to change the vertex type + base_edge_type = Base.typename(ET).wrapper + return base_edge_type(name_map[src(e)], name_map[dst(e)]) +end + +function rename_vertices(g::GT, name_map::Dictionary) where {GT<:AbstractNamedGraph} + original_vertices = vertices(g) + new_vertices = getindices(name_map, original_vertices) + # strip type parameter to allow renaming to change the vertex type + base_graph_type = Base.typename(GT).wrapper + new_g = base_graph_type(new_vertices) + for e in edges(g) + add_edge!(new_g, rename_vertices(e, name_map)) + end + return new_g +end + +function rename_vertices(g::AbstractNamedGraph, name_map::Function) + original_vertices = vertices(g) + return rename_vertices(g, Dictionary(original_vertices, name_map.(original_vertices))) +end + +function NamedGraphs.NamedGraph(vertices::Vector) + return NamedGraph(Graph(length(vertices)), vertices) +end + +function NamedGraphs.NamedDimGraph(vertices::Array) + return NamedDimGraph(Graph(length(vertices)); vertices) +end + +function NamedGraphs.NamedDimDiGraph(vertices::Array) + return NamedDimDiGraph(DiGraph(length(vertices)); vertices) +end diff --git a/src/imports.jl b/src/imports.jl index ace31c6a..995b37e5 100644 --- a/src/imports.jl +++ b/src/imports.jl @@ -14,16 +14,27 @@ import .DataGraphs: underlying_graph, vertex_data, edge_data import Graphs: Graph, is_directed -import LinearAlgebra: svd, factorize, qr +import LinearAlgebra: svd, factorize, qr, normalize, normalize! 
-import NamedGraphs: vertex_to_parent_vertex, to_vertex +import NamedGraphs: vertex_to_parent_vertex, to_vertex, incident_edges import ITensors: # contraction contract, + contract!, orthogonalize, + orthogonalize!, + isortho, inner, + loginner, norm, + lognorm, + expect, + # truncation + truncate!, + truncate, + replacebond!, + replacebond, # site and link indices siteind, siteinds, @@ -45,7 +56,27 @@ import ITensors: settags, tags, # dag - dag + dag, + # permute + permute, + #commoninds + check_hascommoninds, + hascommoninds, + # linkdims + linkdim, + linkdims, + maxlinkdim, + # projected operators + position!, + set_nsite!, + product, + nsite, + # promotion and conversion + promote_itensor_eltype, + scalartype, + # promotion and conversion + promote_itensor_eltype, + scalartype import ITensors.ContractionSequenceOptimization: optimal_contraction_sequence diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 03ac7eeb..5919a5d1 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -17,8 +17,7 @@ copy(tn::ITensorNetwork) = ITensorNetwork(copy(data_graph(tn))) # Construction from collections of ITensors # -function ITensorNetwork(ts::Vector{ITensor}) - g = NamedDimGraph(ts) +function ITensorNetwork(ts::Vector{<:ITensor}, g::AbstractGraph) tn = ITensorNetwork(g) for v in eachindex(ts) tn[v] = ts[v] @@ -26,21 +25,30 @@ function ITensorNetwork(ts::Vector{ITensor}) return tn end +function ITensorNetwork(ts::Vector{<:ITensor}) + return ITensorNetwork(ts, NamedDimGraph(ts)) +end + # # Construction from Graphs # +# catch-all for default ElType +function ITensorNetwork(g::AbstractGraph, args...; kwargs...) + return ITensorNetwork(Float64, g, args...; kwargs...) +end + function _ITensorNetwork(g::NamedDimGraph, site_space::Nothing, link_space::Nothing) dg = NamedDimDataGraph{ITensor,ITensor}(copy(g)) return ITensorNetwork(dg) end -function ITensorNetwork(g::NamedDimGraph; kwargs...) - return ITensorNetwork(IndsNetwork(g; kwargs...)) +function ITensorNetwork(::Type{ElT}, g::NamedDimGraph; kwargs...) where {ElT<:Number} + return ITensorNetwork(ElT, IndsNetwork(g; kwargs...)) end -function ITensorNetwork(g::Graph; kwargs...) - return ITensorNetwork(IndsNetwork(g; kwargs...)) +function ITensorNetwork(::Type{ElT}, g::Graph; kwargs...) where {ElT<:Number} + return ITensorNetwork(ElT, IndsNetwork(g; kwargs...)) end # @@ -50,31 +58,39 @@ end # Alternative implementation: # edge_data(e) = [edge_index(e, link_space)] # is_assigned = assign_data(is; edge_data) -function _ITensorNetwork(is::IndsNetwork, link_space) +function _ITensorNetwork(::Type{ElT}, is::IndsNetwork, link_space) where {ElT<:Number} is_assigned = copy(is) for e in edges(is) is_assigned[e] = [edge_index(e, link_space)] end - return _ITensorNetwork(is_assigned, nothing) + return _ITensorNetwork(ElT, is_assigned, nothing) end get_assigned(d, i, default) = isassigned(d, i) ? 
d[i] : default -function _ITensorNetwork(is::IndsNetwork, link_space::Nothing) +function _ITensorNetwork( + ::Type{ElT}, is::IndsNetwork, link_space::Nothing +) where {ElT<:Number} g = underlying_graph(is) tn = _ITensorNetwork(g, nothing, nothing) for v in vertices(tn) siteinds = get_assigned(is, v, Index[]) linkinds = [get_assigned(is, v => nv, Index[]) for nv in neighbors(is, v)] - setindex_preserve_graph!(tn, ITensor(siteinds, linkinds...), v) + setindex_preserve_graph!(tn, ITensor(ElT, siteinds, linkinds...), v) end return tn end -function ITensorNetwork(is::IndsNetwork; link_space=nothing) - return _ITensorNetwork(is, link_space) +function ITensorNetwork( + ::Type{ElT}, is::IndsNetwork; link_space=nothing +) where {ElT<:Number} + return _ITensorNetwork(ElT, is, link_space) end +# +# Construction from IndsNetwork and state map +# + function insert_links(ψ::ITensorNetwork, edges::Vector=edges(ψ); cutoff=1e-15) for e in edges # Define this to work? @@ -86,11 +102,25 @@ function insert_links(ψ::ITensorNetwork, edges::Vector=edges(ψ); cutoff=1e-15) return ψ end -function ITensorNetwork(is::IndsNetwork, initstate::Function) +function ITensorNetwork( + ::Type{ElT}, is::IndsNetwork, states_map::Dictionary +) where {ElT<:Number} ψ = ITensorNetwork(is) for v in vertices(ψ) - ψ[v] = state(initstate(v), only(is[v])) + ψ[v] = ITensors.convert_leaf_eltype(ElT, state(only(is[v]), states_map[v])) end ψ = insert_links(ψ, edges(is)) return ψ end + +function ITensorNetwork( + ::Type{ElT}, is::IndsNetwork, state::Union{String,Integer} +) where {ElT<:Number} + states_map = dictionary([v => state for v in vertices(is)]) + return ITensorNetwork(ElT, is, states_map) +end + +function ITensorNetwork(::Type{ElT}, is::IndsNetwork, state::Function) where {ElT<:Number} + states_map = dictionary([v => state(v) for v in vertices(is)]) + return ITensorNetwork(ElT, is, states_map) +end diff --git a/src/treetensornetwork/abstractprojttno.jl b/src/treetensornetwork/abstractprojttno.jl new file mode 100644 index 00000000..a6d2b4aa --- /dev/null +++ b/src/treetensornetwork/abstractprojttno.jl @@ -0,0 +1,141 @@ +abstract type AbstractProjTTNO end + +copy(::AbstractProjTTNO) = error("Not implemented") + +set_nsite!(::AbstractProjTTNO, nsite) = error("Not implemented") + +make_environment!(::AbstractProjTTNO, psi, e) = error("Not implemented") + +underlying_graph(P::AbstractProjTTNO) = underlying_graph(P.H) + +pos(P::AbstractProjTTNO) = P.pos + +Graphs.edgetype(P::AbstractProjTTNO) = edgetype(underlying_graph(P)) + +on_edge(P::AbstractProjTTNO) = isa(pos(P), edgetype(P)) + +nsite(P::AbstractProjTTNO) = on_edge(P) ? 0 : length(pos(P)) + +function sites(P::AbstractProjTTNO) + on_edge(P) && return eltype(underlying_graph(P))[] + return pos(P) +end + +function incident_edges(P::AbstractProjTTNO)::Vector{NamedDimEdge{Tuple}} + on_edge(P) && return [pos(P), reverse(pos(P))] + edges = [ + [edgetype(P)(n => v) for n in setdiff(neighbors(underlying_graph(P), v), sites(P))] for + v in sites(P) + ] + return collect(Base.Iterators.flatten(edges)) +end + +function internal_edges(P::AbstractProjTTNO)::Vector{NamedDimEdge{Tuple}} + on_edge(P) && return edgetype(P)[] + edges = [ + [edgetype(P)(v => n) for n in neighbors(underlying_graph(P), v) ∩ sites(P)] for + v in sites(P) + ] + return collect(Base.Iterators.flatten(edges)) +end + +function environment(P::AbstractProjTTNO, edge::NamedDimEdge{Tuple})::ITensor + return P.environments[edge] +end + +# there has to be a better way to do this... 
+function _separate_first(V::Vector)
+  sep = Base.Iterators.peel(V)
+  isnothing(sep) && return eltype(V)[], eltype(V)[]
+  return sep[1], collect(sep[2])
+end
+
+function _separate_first_two(V::Vector)
+  frst, rst = _separate_first(V)
+  scnd, rst = _separate_first(rst)
+  return frst, scnd, rst
+end
+
+function contract(P::AbstractProjTTNO, v::ITensor)::ITensor
+  environments = ITensor[environment(P, edge) for edge in incident_edges(P)]
+  # manual heuristic for contraction order fixing: for each site in ProjTTNO, apply up to
+  # two environments, then TTNO tensor, then other environments
+  if on_edge(P)
+    itensor_map = environments
+  else
+    itensor_map = Union{ITensor,OneITensor}[] # TODO: will a Hamiltonian TTNO tensor ever be a OneITensor?
+    for s in sites(P)
+      site_envs = filter(hascommoninds(P.H[s]), environments)
+      frst, scnd, rst = _separate_first_two(site_envs)
+      site_tensors = vcat(frst, scnd, P.H[s], rst)
+      append!(itensor_map, site_tensors)
+    end
+  end
+  # TODO: actually use optimal contraction sequence here
+  Hv = v
+  for it in itensor_map
+    Hv *= it
+  end
+  return Hv
+end
+
+function product(P::AbstractProjTTNO, v::ITensor)::ITensor
+  Pv = contract(P, v)
+  if order(Pv) != order(v)
+    error(
+      string(
+        "The order of the ProjTTNO-ITensor product P*v is not equal to the order of the ITensor v, ",
+        "this is probably due to an index mismatch.\nCommon reasons for this error: \n",
+        "(1) You are trying to multiply the ProjTTNO with the $(nsite(P))-site wave-function at the wrong position.\n",
+        "(2) `orthogonalize!` was called, changing the TTNS without updating the ProjTTNO.\n\n",
+        "P*v inds: $(inds(Pv)) \n\n",
+        "v inds: $(inds(v))",
+      ),
+    )
+  end
+  return noprime(Pv)
+end
+
+(P::AbstractProjTTNO)(v::ITensor) = product(P, v)
+
+function Base.eltype(P::AbstractProjTTNO)::Type
+  ElType = eltype(P.H[first(sites(P))])
+  for v in sites(P)
+    ElType = promote_type(ElType, eltype(P.H[v]))
+  end
+  for e in incident_edges(P)
+    ElType = promote_type(ElType, eltype(environment(P, e)))
+  end
+  return ElType
+end
+
+function Base.size(P::AbstractProjTTNO)::Tuple{Int,Int}
+  d = 1
+  for e in incident_edges(P)
+    for i in inds(environment(P, e))
+      plev(i) > 0 && (d *= dim(i))
+    end
+  end
+  for j in sites(P)
+    for i in inds(P.H[j])
+      plev(i) > 0 && (d *= dim(i))
+    end
+  end
+  return (d, d)
+end
+
+function position!(
+  P::AbstractProjTTNO, psi::TTNS, pos::Union{Vector{<:Tuple},NamedDimEdge{Tuple}}
+)
+  # shift position
+  P.pos = pos
+  # invalidate environments corresponding to internal edges
+  for e in internal_edges(P)
+    unset!(P.environments, e)
+  end
+  # make all environments surrounding new position
+  for e in incident_edges(P)
+    make_environment!(P, psi, e)
+  end
+  return P
+end
diff --git a/src/treetensornetwork/abstracttreetensornetwork.jl b/src/treetensornetwork/abstracttreetensornetwork.jl
new file mode 100644
index 00000000..cc2226fa
--- /dev/null
+++ b/src/treetensornetwork/abstracttreetensornetwork.jl
@@ -0,0 +1,399 @@
+abstract type AbstractTreeTensorNetwork <: AbstractITensorNetwork end
+
+#
+# Field access
+#
+
+ITensorNetwork(ψ::AbstractTreeTensorNetwork) = ψ.itensor_network
+ortho_center(ψ::AbstractTreeTensorNetwork) = ψ.ortho_center
+
+function default_root_vertex(gs::AbstractGraph...)
+ # @assert all(is_tree.(gs)) + return first(leaf_vertices(gs[end])) +end + +# +# Orthogonality center +# + +isortho(ψ::AbstractTreeTensorNetwork) = isone(length(ortho_center(ψ))) + +function set_ortho_center!(ψ::AbstractTreeTensorNetwork, new_center::Vector{<:Tuple}) + ψ.ortho_center = new_center + return ψ +end + +function set_ortho_center(ψ::AbstractTreeTensorNetwork, new_center::Vector{<:Tuple}) + return set_ortho_center!(copy(ψ), new_center) +end + +reset_ortho_center!(ψ::AbstractTreeTensorNetwork) = set_ortho_center!(ψ, vertices(ψ)) + +# +# Dense constructors +# + +# construct from dense ITensor, using IndsNetwork of site indices +function (::Type{TTNT})( + A::ITensor, is::IndsNetwork; ortho_center=default_root_vertex(is), kwargs... +) where {TTNT<:AbstractTreeTensorNetwork} + for v in vertices(is) + @assert hasinds(A, is[v]) + end + @assert ortho_center ∈ vertices(is) + ψ = ITensorNetwork(is) + Ã = A + for e in post_order_dfs_edges(ψ, ortho_center) + left_inds = uniqueinds(is, e) + L, R = factorize(Ã, left_inds; tags=edge_tag(e), ortho="left", kwargs...) + l = commonind(L, R) + ψ[src(e)] = L + is[e] = [l] + Ã = R + end + ψ[ortho_center] = Ã + T = TTNT(ψ) + orthogonalize!(T, ortho_center) + return T +end + +# construct from dense ITensor, using NamedDimGraph and vector of site indices +# TODO: remove if it doesn't turn out to be useful +function (::Type{TTNT})( + A::ITensor, sites::Vector, g::NamedDimGraph; vertex_order=vertices(g), kwargs... +) where {TTNT<:AbstractTreeTensorNetwork} + is = IndsNetwork(g; site_space=Dictionary(vertex_order, sites)) + return TTNT(A, is; kwargs...) +end + +# construct from dense array, using IndsNetwork +# TODO: probably remove this one, doesn't seem very useful +function (::Type{TTNT})( + A::AbstractArray{<:Number}, is::IndsNetwork; vertex_order=vertices(is), kwargs... +) where {TTNT<:AbstractTreeTensorNetwork} + sites = [is[v] for v in vertex_order] + return TTNT(itensor(A, sites...), is; kwargs...) +end + +# construct from dense array, using NamedDimGraph and vector of site indices +function (::Type{TTNT})( + A::AbstractArray{<:Number}, sites::Vector, args...; kwargs... +) where {TTNT<:AbstractTreeTensorNetwork} + return TTNT(itensor(A, sites...), sites, args...; kwargs...) +end + +# +# Orthogonalization +# + +function orthogonalize!(ψ::AbstractTreeTensorNetwork, root_vertex::Tuple) + (isortho(ψ) && only(ortho_center(ψ)) == root_vertex) && return ψ + if isortho(ψ) + edge_list = edge_path(ψ, only(ortho_center(ψ)), root_vertex) + else + edge_list = post_order_dfs_edges(ψ, root_vertex) + end + for e in edge_list + ψ = orthogonalize!(ψ, e) + end + set_ortho_center!(ψ, [root_vertex]) + return ψ +end + +function orthogonalize!(ψ::AbstractTreeTensorNetwork, root_vertex...; kwargs...) + return orthogonalize!(ψ, to_vertex(ψ, root_vertex...); kwargs...) +end + +# For ambiguity error +function orthogonalize!(ψ::AbstractTreeTensorNetwork, edge::AbstractEdge; kwargs...) + return _orthogonalize_edge!(ψ, edge; kwargs...) +end + +function orthogonalize(ψ::AbstractTreeTensorNetwork, args...; kwargs...) + return orthogonalize!(copy(ψ), args...; kwargs...) +end + +# +# Truncation +# + +function truncate!( + ψ::AbstractTreeTensorNetwork; root_vertex::Tuple=default_root_vertex(ψ), kwargs... +) + for e in post_order_dfs_edges(ψ, root_vertex) + # always orthogonalize towards source first to make truncations controlled + orthogonalize!(ψ, src(e)) + truncate!(ψ, e; kwargs...) 
+    set_ortho_center!(ψ, [dst(e)])
+  end
+  return ψ
+end
+
+# For ambiguity error
+function truncate!(ψ::AbstractTreeTensorNetwork, edge::AbstractEdge; kwargs...)
+  return _truncate_edge!(ψ, edge; kwargs...)
+end
+
+function truncate(ψ::AbstractTreeTensorNetwork, args...; kwargs...)
+  return truncate!(copy(ψ), args...; kwargs...)
+end
+
+#
+# Contraction
+#
+
+# TODO: decide on contraction order: reverse dfs vertices or forward dfs edges?
+function contract(
+  ψ::AbstractTreeTensorNetwork, root_vertex::Tuple=default_root_vertex(ψ); kwargs...
+)
+  ψ = copy(ψ)
+  # reverse post order vertices
+  traversal_order = reverse(post_order_dfs_vertices(ψ, root_vertex))
+  return contract(ITensorNetwork(ψ); sequence=traversal_order, kwargs...)
+  # # forward post order edges
+  # ψ = copy(ψ)
+  # for e in post_order_dfs_edges(ψ, root_vertex)
+  #   contract!(ψ, e)
+  # end
+  # return ψ[root_vertex]
+end
+
+function inner(
+  ϕ::AbstractTreeTensorNetwork,
+  ψ::AbstractTreeTensorNetwork;
+  root_vertex=default_root_vertex(ϕ, ψ),
+)
+  ϕᴴ = sim(dag(ϕ); sites=[])
+  ψ = sim(ψ; sites=[])
+  ϕψ = ϕᴴ ⊗ ψ
+  # TODO: find the largest tensor and use it as
+  # the `root_vertex`.
+  for e in post_order_dfs_edges(ψ, root_vertex)
+    if has_vertex(ϕψ, 2, src(e)...)
+      ϕψ = contract(ϕψ, (2, src(e)...) => (1, src(e)...))
+    end
+    ϕψ = contract(ϕψ, (1, src(e)...) => (1, dst(e)...))
+    if has_vertex(ϕψ, 2, dst(e)...)
+      ϕψ = contract(ϕψ, (2, dst(e)...) => (1, dst(e)...))
+    end
+  end
+  return ϕψ[1, root_vertex...][]
+end
+
+function norm(ψ::AbstractTreeTensorNetwork)
+  if isortho(ψ)
+    return norm(ψ[only(ortho_center(ψ))])
+  end
+  return √(abs(real(inner(ψ, ψ))))
+end
+
+#
+# Utility
+#
+
+function normalize!(ψ::AbstractTreeTensorNetwork)
+  c = ortho_center(ψ)
+  lognorm_ψ = lognorm(ψ)
+  if lognorm_ψ == -Inf
+    return ψ
+  end
+  z = exp(lognorm_ψ / length(c))
+  for v in c
+    ψ[v] ./= z
+  end
+  return ψ
+end
+
+function normalize(ψ::AbstractTreeTensorNetwork)
+  return normalize!(copy(ψ))
+end
+
+function _apply_to_orthocenter!(f, ψ::AbstractTreeTensorNetwork, x)
+  v = first(ortho_center(ψ))
+  ψ[v] = f(ψ[v], x)
+  return ψ
+end
+
+function _apply_to_orthocenter(f, ψ::AbstractTreeTensorNetwork, x)
+  return _apply_to_orthocenter!(f, copy(ψ), x)
+end
+
+Base.:*(ψ::AbstractTreeTensorNetwork, α::Number) = _apply_to_orthocenter(*, ψ, α)
+
+Base.:*(α::Number, ψ::AbstractTreeTensorNetwork) = ψ * α
+
+Base.:/(ψ::AbstractTreeTensorNetwork, α::Number) = _apply_to_orthocenter(/, ψ, α)
+
+Base.:-(ψ::AbstractTreeTensorNetwork) = -1 * ψ
+
+function LinearAlgebra.rmul!(ψ::AbstractTreeTensorNetwork, α::Number)
+  return _apply_to_orthocenter!(*, ψ, α)
+end
+
+function lognorm(ψ::AbstractTreeTensorNetwork)
+  if isortho(ψ)
+    return log(norm(ψ[only(ortho_center(ψ))]))
+  end
+  lognorm2_ψ = logdot(ψ, ψ)
+  rtol = eps(real(scalartype(ψ))) * 10
+  atol = rtol
+  if !IsApprox.isreal(lognorm2_ψ, Approx(; rtol=rtol, atol=atol))
+    @warn "log(norm²) is $lognorm2_ψ, which is not real up to a relative tolerance of $rtol and an absolute tolerance of $atol. Taking the real part, which may not be accurate."
+  end
+  return 0.5 * real(lognorm2_ψ)
+end
+
+function logdot(ψ1::TTNT, ψ2::TTNT; kwargs...) where {TTNT<:AbstractTreeTensorNetwork}
+  return _log_or_not_dot(ψ1, ψ2, true; kwargs...)
+end
+
+function loginner(ψ1::TTNT, ψ2::TTNT; kwargs...) where {TTNT<:AbstractTreeTensorNetwork}
+  return logdot(ψ1, ψ2; kwargs...)
+end
+
+# TODO: stick with this traversal or find optimal contraction sequence?
+function _log_or_not_dot( + ψ1::TTNT, ψ2::TTNT, loginner::Bool; root_vertex=default_root_vertex(ψ1, ψ2) +)::Number where {TTNT<:AbstractTreeTensorNetwork} + N = nv(ψ1) + if nv(ψ2) != N + throw(DimensionMismatch("inner: mismatched number of vertices $N and $(nv(ψ2))")) + end + ψ1dag = sim(dag(ψ1); sites=[]) + traversal_order = reverse(post_order_dfs_vertices(ψ1, root_vertex)) + check_hascommoninds(siteinds, ψ1dag, ψ2) + + O = ψ1dag[root_vertex] * ψ2[root_vertex] + + if loginner + normO = norm(O) + log_inner_tot = log(normO) + O ./= normO + end + + for v in traversal_order[2:end] + O = (O * ψ1dag[v]) * ψ2[v] + + if loginner + normO = norm(O) + log_inner_tot += log(normO) + O ./= normO + end + end + + if loginner + if !isreal(O[]) || real(O[]) < 0 + log_inner_tot += log(complex(O[])) + end + return log_inner_tot + end + + dot_ψ1_ψ2 = O[] + + if !isfinite(dot_ψ1_ψ2) + @warn "The inner product (or norm²) you are computing is very large ($dot_ψ1_ψ2). You should consider using `lognorm` or `loginner` instead, which will help avoid floating point errors. For example if you are trying to normalize your MPS/MPO `A`, the normalized MPS/MPO `B` would be given by `B = A ./ z` where `z = exp(lognorm(A) / length(A))`." + end + + return dot_ψ1_ψ2 +end + +function _add_maxlinkdims(ψs::AbstractTreeTensorNetwork...) + maxdims = Dictionary{edgetype(ψs[1]),Int}() + for e in edges(ψs[1]) + maxdims[e] = sum(ψ -> linkdim(ψ, e), ψs) + maxdims[reverse(e)] = maxdims[e] + end + return maxdims +end + +# TODO: actually implement this? +function Base.:+( + ::ITensors.Algorithm"densitymatrix", + ψs::TTNT...; + cutoff=1e-15, + root_vertex=default_root_vertex(ψs...), + kwargs..., +) where {TTNT<:AbstractTreeTensorNetwork} + return error("Not implemented (yet) for trees.") +end + +function Base.:+( + ::ITensors.Algorithm"directsum", ψs::TTNT...; root_vertex=default_root_vertex(ψs...) +) where {TTNT<:AbstractTreeTensorNetwork} + @assert all(ψ -> nv(first(ψs)) == nv(ψ), ψs) + + # Output state + ϕ = TTNS(siteinds(ψs[1])) + + vs = post_order_dfs_vertices(ϕ, root_vertex) + es = post_order_dfs_edges(ϕ, root_vertex) + link_space = Dict{edgetype(ϕ),Index}() + + for v in reverse(vs) + edges = filter(e -> dst(e) == v || src(e) == v, es) + dims_in = findall(e -> dst(e) == v, edges) + dim_out = findfirst(e -> src(e) == v, edges) + + ls = [Tuple(linkinds(only, ψ, e) for e in edges) for ψ in ψs] + ϕv, lv = directsum((ψs[i][v] => ls[i] for i in 1:length(ψs))...; tags=tags.(first(ls))) + for din in dims_in + link_space[edges[din]] = lv[din] + end + if !isnothing(dim_out) + ϕv = replaceind(ϕv, lv[dim_out] => dag(link_space[edges[dim_out]])) + end + + ϕ[v] = ϕv + end + return convert(TTNT, ϕ) +end + +# TODO: switch default algorithm once more are implemented +function Base.:+( + ψs::AbstractTreeTensorNetwork...; alg=ITensors.Algorithm"directsum"(), kwargs... +) + return +(ITensors.Algorithm(alg), ψs...; kwargs...) +end + +Base.:+(ψ::AbstractTreeTensorNetwork) = ψ + +ITensors.add(ψs::AbstractTreeTensorNetwork...; kwargs...) = +(ψs...; kwargs...) + +function Base.:-(ψ1::AbstractTreeTensorNetwork, ψ2::AbstractTreeTensorNetwork; kwargs...) + return +(ψ1, -ψ2; kwargs...) +end + +function ITensors.add(A::T, B::T; kwargs...) where {T<:AbstractTreeTensorNetwork} + return +(A, B; kwargs...) 
+end + +function permute( + ψ::TTNT, ::Tuple{typeof(linkind),typeof(siteinds),typeof(linkind)} +)::TTNT where {TTNT<:AbstractTreeTensorNetwork} + ψ̃ = TTNT(underlying_graph(ψ)) + for v in vertices(ψ) + ls = [only(linkinds(ψ, n => v)) for n in neighbors(ψ, v)] # TODO: won't work for multiple indices per link... + ss = sort(Tuple(siteinds(ψ, v)); by=plev) + setindex_preserve_graph!( + ψ̃, permute(ψ[v], filter(!isnothing, (ls[1], ss..., ls[2:end]...))), v + ) + end + set_ortho_center!(ψ̃, ortho_center(ψ)) + return ψ̃ +end + +function Base.isapprox( + x::AbstractTreeTensorNetwork, + y::AbstractTreeTensorNetwork; + atol::Real=0, + rtol::Real=Base.rtoldefault( + LinearAlgebra.promote_leaf_eltypes(x), LinearAlgebra.promote_leaf_eltypes(y), atol + ), +) + d = norm(x - y) + if isfinite(d) + return d <= max(atol, rtol * max(norm(x), norm(y))) + else + error("In `isapprox(x::TTNS, y::TTNS)`, `norm(x - y)` is not finite") + end +end diff --git a/src/treetensornetwork/opsum_to_ttno.jl b/src/treetensornetwork/opsum_to_ttno.jl new file mode 100644 index 00000000..1d7de470 --- /dev/null +++ b/src/treetensornetwork/opsum_to_ttno.jl @@ -0,0 +1,589 @@ +# convert ITensors.OpSum to TreeTensorNetworkOperator + +# TODO: fix symbolic SVD compression for certain combinations of long-range interactions! + +# +# Utility methods +# + +# number of neighbors of vertex in graph +function num_neighbors(graph::AbstractGraph, vertex...) + return length(neighbors(graph, vertex...)) +end + +# linear ordering of vertices in tree graph relative to chosen root +function find_index_in_tree(site, g::AbstractGraph, root_vertex) + ordering = post_order_dfs_vertices(g, root_vertex) + return findfirst(x -> x == site, ordering) +end + +function find_index_in_tree(o::Op, g::AbstractGraph, root_vertex) + return find_index_in_tree(ITensors.site(o), g::AbstractGraph, root_vertex) +end + +# determine 'support' of product operator on tree graph +function span(t::Scaled{C,Prod{Op}}, g::AbstractGraph) where {C} + spn = eltype(g)[] + nterms = length(t) + for i in 1:nterms, j in i:nterms + path = vertex_path(g, ITensors.site(t[i]), ITensors.site(t[j])) + spn = union(spn, path) + end + return spn +end + +# determine whether an operator string crosses a given graph vertex +function crosses_vertex(t::Scaled{C,Prod{Op}}, g::AbstractGraph, v) where {C} + return v ∈ span(t, g) +end + +# annoying thing to allow sparse arrays with ITensors.Op entries +# TODO: get rid of this +function Base.zero(::Type{Scaled{C,Prod{Op}}}) where {C} + return zero(C) * Prod([Op("0")]) +end +Base.zero(t::Scaled) = zero(typeof(t)) + +# +# Tree adaptations of functionalities in ITensors.jl/src/physics/autompo/opsum_to_mpo.jl +# + +""" + finite_state_machine(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple) where {C} + +Finite state machine generator for ITensors.OpSum Hamiltonian defined on a tree graph. The +site Index graph must be a tree graph, and the chosen root must be a leaf vertex of this +tree. 
Returns a DataGraph of SparseArrayKit.SparseArrays +""" +function finite_state_machine( + os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple +) where {C} + os = deepcopy(os) + os = sorteachterm(os, sites, root_vertex) + os = ITensors.sortmergeterms(os) + + ValType = ITensors.determineValType(ITensors.terms(os)) + + # sparse symbolic representation of the TTNO Hamiltonian as a DataGraph of SparseArrays + sparseTTNO = NamedDimDataGraph{SparseArray{Scaled{ValType,Prod{Op}}}}( + underlying_graph(sites) + ) + + # some things to keep track of + vs = post_order_dfs_vertices(sites, root_vertex) # store vertices in fixed ordering relative to root + es = post_order_dfs_edges(sites, root_vertex) # store edges in fixed ordering relative to root + ranks = Dict(v => num_neighbors(sites, v) for v in vs) # rank of every TTNO tensor in network + linkmaps = Dict(e => Dict{Prod{Op},Int}() for e in es) # map from term in Hamiltonian to edge channel index for every edge + site_coef_done = Prod{Op}[] # list of Hamiltonian terms for which the coefficient has been added to a site factor + edge_orders = NamedDimDataGraph{Vector{edgetype(sites)}}(underlying_graph(sites)) # relate indices of sparse TTNO tensor to incident graph edges for each site + + for v in vs + # collect all nontrivial entries of the TTNO tensor at vertex v + entries = Tuple{MVector{ranks[v],Int},Scaled{C,Prod{Op}}}[] # MVector might be overkill... + + # for every vertex, find all edges that contain this vertex + edges = filter(e -> dst(e) == v || src(e) == v, es) + # use the corresponding ordering as index order for tensor elements at this site + edge_orders[v] = edges + dims_in = findall(e -> dst(e) == v, edges) + edges_in = edges[dims_in] + dim_out = findfirst(e -> src(e) == v, edges) + edge_out = (isnothing(dim_out) ? 
[] : edges[dim_out])
+
+    # sanity check, leaves only have single incoming or outgoing edge
+    @assert !isempty(dims_in) || !isnothing(dim_out)
+    (isempty(dims_in) || isnothing(dim_out)) && @assert is_leaf(sites, v)
+
+    for term in os
+      # loop over OpSum and pick out terms that act on current vertex
+      crosses_vertex(term, sites, v) || continue
+
+      # for every incoming edge, filter out factors that come in from the direction of
+      # that edge
+      incoming = Dict(
+        e => filter(t -> e ∈ edge_path(sites, ITensors.site(t), v), ITensors.terms(term))
+        for e in edges_in
+      )
+      # filter out factor that acts on current vertex
+      onsite = filter(t -> (ITensors.site(t) == v), ITensors.terms(term))
+      # filter out factors that go out along the outgoing edge
+      outgoing = filter(
+        t -> edge_out ∈ edge_path(sites, v, ITensors.site(t)), ITensors.terms(term)
+      )
+
+      # translate into tensor entry
+      T_inds = fill(-1, ranks[v])
+      for din in dims_in
+        if !isempty(incoming[edges[din]])
+          T_inds[din] = ITensors.posInLink!(linkmaps[edges[din]], ITensors.argument(term)) # get incoming channel
+        end
+      end
+      if !isnothing(dim_out) && !isempty(outgoing)
+        T_inds[dim_out] = ITensors.posInLink!(linkmaps[edge_out], ITensors.argument(term)) # add outgoing channel
+      end
+      # if term starts at this site, add its coefficient as a site factor
+      site_coef = one(C)
+      if (isempty(dims_in) || all(T_inds[dims_in] .== -1)) &&
+        ITensors.argument(term) ∉ site_coef_done
+        site_coef = ITensors.coefficient(term)
+        push!(site_coef_done, ITensors.argument(term))
+      end
+      # add onsite identity for interactions passing through vertex
+      if isempty(onsite)
+        if !ITensors.using_auto_fermion() && isfermionic(outgoing, sites) # TODO: check if fermions are actually supported here!
+          push!(onsite, Op("F", v))
+        else
+          push!(onsite, Op("Id", v))
+        end
+      end
+      # save indices and value of sparse tensor entry
+      el = (MVector{ranks[v]}(T_inds), site_coef * Prod(onsite))
+      push!(entries, el)
+    end
+
+    # handle start and end of operator terms and convert to sparse array
+    linkdims = Tuple([
+      (isempty(linkmaps[e]) ? 0 : maximum(values(linkmaps[e]))) + 2 for e in edges
+    ])
+    T = SparseArray{Scaled{ValType,Prod{Op}},ranks[v]}(undef, linkdims)
+    for (T_inds, t) in entries
+      if !isempty(dims_in)
+        start_dims = filter(d -> T_inds[d] == -1, dims_in)
+        normal_dims = filter(d -> T_inds[d] != -1, dims_in)
+        T_inds[start_dims] .= 1 # always start in first channel
+        T_inds[normal_dims] .+= 1 # shift regular channels
+      end
+      if !isnothing(dim_out)
+        if T_inds[dim_out] == -1
+          T_inds[dim_out] = linkdims[dim_out] # always end in last channel
+        else
+          T_inds[dim_out] += 1 # shift regular channel
+        end
+      end
+      T[T_inds...] = t
+    end
+    # add starting and ending identity operators
+    if !isnothing(dim_out)
+      T[ones(Int, ranks[v])...] = 1 * Prod([Op("Id", v)]) # starting identity is easy
+    end
+    # ending identities not so much
+    idT_end_inds = ones(Int, ranks[v])
+    if !isnothing(dim_out)
+      idT_end_inds[dim_out] = linkdims[dim_out]
+    end
+    for din in dims_in
+      idT_end_inds[din] = linkdims[din]
+      T[idT_end_inds...] = 1 * Prod([Op("Id", v)])
+      idT_end_inds[din] = 1 # reset
+    end
+    sparseTTNO[v] = T
+  end
+  return sparseTTNO, edge_orders
+end
+
+"""
+    fsmTTNO(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple, kwargs...) where {C}
+
+Construct a dense TreeTensorNetworkOperator from sparse finite state machine
+representation, without compression.
+""" +function fsmTTNO( + os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple +)::TTNO where {C} + ValType = ITensors.determineValType(ITensors.terms(os)) + # start from finite state machine + fsm, edge_orders = finite_state_machine(os, sites, root_vertex) + # some trickery to get link dimension for every edge + link_space = Dict{edgetype(sites),Index}() + function get_linkind!(link_space, e) + if !haskey(link_space, e) + d = findfirst(x -> (x == e || x == reverse(e)), edge_orders[src(e)]) + link_space[e] = Index(size(fsm[src(e)], d), edge_tag(e)) + end + return link_space[e] + end + # compress finite state machine into dense form + H = TTNO(sites) + for v in vertices(sites) + linkinds = [get_linkind!(link_space, e) for e in edge_orders[v]] + linkdims = dim.(linkinds) + H[v] = ITensor() + for (T_ind, t) in nonzero_pairs(fsm[v]) + (abs(coefficient(t)) > eps()) || continue + T = zeros(ValType, linkdims...) + ct = convert(ValType, coefficient(t)) + T[T_ind] += ct + T = itensor(T, linkinds) + H[v] += T * computeSiteProd(sites, ITensors.argument(t)) + end + end + return H +end + +# this is broken for certain combinations of longer-range interactions, no idea why... +""" + svdTTNO(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple, kwargs...) where {C} + +Construct a dense TreeTensorNetworkOperator from a symbolic OpSum representation of a +Hamiltonian, compressin shared interaction channels. +""" +function svdTTNO( + os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple; kwargs... +)::TTNO where {C} + mindim::Int = get(kwargs, :mindim, 1) + maxdim::Int = get(kwargs, :maxdim, 10000) + cutoff::Float64 = get(kwargs, :cutoff, 1E-15) + + ValType = ITensors.determineValType(ITensors.terms(os)) + + # some things to keep track of + vs = post_order_dfs_vertices(sites, root_vertex) # store vertices in fixed ordering relative to root + es = post_order_dfs_edges(sites, root_vertex) # store edges in fixed ordering relative to root + ranks = Dict(v => num_neighbors(sites, v) for v in vs) # rank of every TTNO tensor in network + Vs = Dict(e => Matrix{ValType}(undef, 1, 1) for e in es) # link isometries for SVD compression of TTNO + leftmaps = Dict(e => Dict{Vector{Op},Int}() for e in es) # map from term in Hamiltonian to edge left channel index for every edge + rightmaps = Dict(e => Dict{Vector{Op},Int}() for e in es) # map from term in Hamiltonian to edge right channel index for every edge + leftbond_coefs = Dict(e => ITensors.MatElem{ValType}[] for e in es) # bond coefficients for left edge channels + site_coef_done = Prod{Op}[] # list of terms for which the coefficient has been added to a site factor + bond_coef_done = Dict(v => Prod{Op}[] for v in vs) # list of terms for which the coefficient has been added to a bond matrix for each vertex + + # temporary symbolic representation of TTNO Hamiltonian + tempTTNO = Dict(v => Tuple{MVector{ranks[v],Int},Scaled{C,Prod{Op}}}[] for v in vs) + + # build compressed finite state machine representation + for v in vs + # for every vertex, find all edges that contain this vertex + edges = filter(e -> dst(e) == v || src(e) == v, es) + # use the corresponding ordering as index order for tensor elements at this site + dims_in = findall(e -> dst(e) == v, edges) + edges_in = edges[dims_in] + dim_out = findfirst(e -> src(e) == v, edges) + edge_out = (isnothing(dim_out) ? 
[] : edges[dim_out]) + + # sanity check, leaves only have single incoming or outgoing edge + @assert !isempty(dims_in) || !isnothing(dim_out) + (isempty(dims_in) || isnothing(dim_out)) && @assert is_leaf(sites, v) + + for term in os + # loop over OpSum and pick out terms that act on current vertex + crosses_vertex(term, sites, v) || continue + + # for every incoming edge, filter out factors that come in from the direction of + # that edge + incoming = Dict( + e => filter(t -> e ∈ edge_path(sites, ITensors.site(t), v), ITensors.terms(term)) + for e in edges_in + ) + # filter out factor that acts on current vertex + onsite = filter(t -> (ITensors.site(t) == v), ITensors.terms(term)) + # filter out factors that go out along the outgoing edge + outgoing = filter( + t -> edge_out ∈ edge_path(sites, v, ITensors.site(t)), ITensors.terms(term) + ) + + # translate into tensor entry + T_inds = fill(-1, ranks[v]) + # channel merging still not working properly somehow! + for din in dims_in + bond_row = -1 + bond_col = -1 + # treat factors coming in along current edge as 'left' + left = incoming[edges[din]] + other_dims_in = filter(dd -> dd != din, dims_in) + other_incoming = [incoming[edges[dd]] for dd in other_dims_in] + # treat all other factors as 'right' + right = vcat(other_incoming..., outgoing) + if !isempty(left) + bond_row = ITensors.posInLink!(leftmaps[edges[din]], left) + bond_col = ITensors.posInLink!(rightmaps[edges[din]], vcat(onsite, right)) # get incoming channel + bond_coef = one(ValType) + if ITensors.argument(term) ∉ bond_coef_done[v] + bond_coef *= convert(ValType, ITensors.coefficient(term)) + push!(bond_coef_done[v], ITensors.argument(term)) + end + push!(leftbond_coefs[edges[din]], ITensors.MatElem(bond_row, bond_col, bond_coef)) + end + T_inds[din] = bond_col + end + if !isnothing(dim_out) && !isempty(outgoing) + T_inds[dim_out] = ITensors.posInLink!(rightmaps[edge_out], outgoing) # add outgoing channel + end + # if term starts at this site, add its coefficient as a site factor + site_coef = one(C) + if (isempty(dims_in) || all(T_inds[dims_in] .== -1)) && + ITensors.argument(term) ∉ site_coef_done + site_coef = ITensors.coefficient(term) + push!(site_coef_done, ITensors.argument(term)) + end + # add onsite identity for interactions passing through vertex + if isempty(onsite) + if !ITensors.using_auto_fermion() && isfermionic(outgoing, sites) # TODO: check if fermions are actually supported here! + push!(onsite, Op("F", v)) + else + push!(onsite, Op("Id", v)) + end + end + # save indices and value of symbolic tensor entry + el = (MVector{ranks[v]}(T_inds), site_coef * Prod(onsite)) + push!(tempTTNO[v], el) + end + ITensors.remove_dups!(tempTTNO[v]) + # handle symbolic truncation (still something wrong with this) + for din in dims_in + if !isempty(leftbond_coefs[edges[din]]) + M = ITensors.toMatrix(leftbond_coefs[edges[din]]) + U, S, V = svd(M) + P = S .^ 2 + truncate!(P; maxdim=maxdim, cutoff=cutoff, mindim=mindim) + tdim = length(P) + nc = size(M, 2) + Vs[edges[din]] = Matrix{ValType}(V[1:nc, 1:tdim]) + end + end + end + + # compress this tempTTNO representation into dense form + + link_space = dictionary([ + e => Index((isempty(rightmaps[e]) ? 0 : size(Vs[e], 2)) + 2, edge_tag(e)) for e in es + ]) + + H = TTNO(sites) + + for v in vs # can I merge this with previous loop? no, need all need the Vs... 
+ + # redo the whole thing like before + edges = filter(e -> dst(e) == v || src(e) == v, es) + dims_in = findall(e -> dst(e) == v, edges) + dim_out = findfirst(e -> src(e) == v, edges) + + # slice isometries at this vertex + Vv = [Vs[e] for e in edges] + + linkinds = [link_space[e] for e in edges] + linkdims = dim.(linkinds) + + H[v] = ITensor() + + for (T_inds, t) in tempTTNO[v] + (abs(coefficient(t)) > eps()) || continue + T = zeros(ValType, linkdims...) + ct = convert(ValType, coefficient(t)) + terminal_dims = findall(d -> T_inds[d] == -1, 1:ranks[v]) # directions in which term starts or ends + normal_dims = findall(d -> T_inds[d] ≠ -1, 1:ranks[v]) # normal dimensions, do truncation thingies + T_inds[terminal_dims] .= 1 # start in channel 1 + if !isnothing(dim_out) && dim_out ∈ terminal_dims + T_inds[dim_out] = linkdims[dim_out] # end in channel linkdims[d] for each dimension d + end + if isempty(normal_dims) + T[T_inds...] += ct # on-site term + else + # abracadabra? + dim_ranges = Tuple(size(Vv[d], 2) for d in normal_dims) + for c in CartesianIndices(dim_ranges) + z = ct + temp_inds = copy(T_inds) + for (i, d) in enumerate(normal_dims) + V_factor = Vv[d][T_inds[d], c[i]] + z *= (d ∈ dims_in ? conj(V_factor) : V_factor) + temp_inds[d] = 1 + c[i] + end + T[temp_inds...] += z + end + end + T = itensor(T, linkinds) + H[v] += T * computeSiteProd(sites, ITensors.argument(t)) + end + + # add starting and ending identity operators + idT = zeros(ValType, linkdims...) + if !isnothing(dim_out) + idT[ones(Int, ranks[v])...] = 1.0 # starting identity is easy + end + # ending identities not so much + idT_end_inds = ones(Int, ranks[v]) + if !isnothing(dim_out) + idT_end_inds[dim_out] = linkdims[dim_out] + end + for din in dims_in + idT_end_inds[din] = linkdims[din] + idT[idT_end_inds...] = 1 + idT_end_inds[din] = 1 # reset + end + T = itensor(idT, linkinds) + H[v] += T * computeSiteProd(sites, Prod([Op("Id", v)])) + end + + return H +end + +# +# Tree adaptations of functionalities in ITensors.jl/src/physics/autompo/opsum_to_mpo_generic.jl +# + +# TODO: fix quantum number and fermion support, definitely broken + +# needed an extra `only` compared to ITensors version since IndsNetwork has Vector{<:Index} +# as vertex data +function isfermionic(t::Vector{Op}, sites::IndsNetwork{<:Index}) + p = +1 + for op in t + if has_fermion_string(ITensors.name(op), only(sites[ITensors.site(op)])) + p *= -1 + end + end + return (p == -1) +end + +# only(site(ops[1])) in ITensors breaks for tuple site labels, had to drop the only +function computeSiteProd(sites::IndsNetwork{<:Index}, ops::Prod{Op})::ITensor + v = ITensors.site(ops[1]) + T = op(sites[v], ITensors.which_op(ops[1]); ITensors.params(ops[1])...) + for j in 2:length(ops) + (ITensors.site(ops[j]) != v) && error("Mismatch of vertex labels in computeSiteProd") + opj = op(sites[v], ITensors.which_op(ops[j]); ITensors.params(ops[j])...) + T = product(T, opj) + end + return T +end + +# changed `isless_site` to use tree vertex ordering relative to root +function sorteachterm(os::OpSum, sites::IndsNetwork{<:Index}, root_vertex::Tuple) + os = copy(os) + findpos(op::Op) = find_index_in_tree(op, sites, root_vertex) + isless_site(o1::Op, o2::Op) = findpos(o1) < findpos(o2) + N = nv(sites) + for n in eachindex(os) + t = os[n] + Nt = length(t) + + if !all(map(v -> has_vertex(sites, v), ITensors.sites(t))) + error( + "The OpSum contains a term $t that does not have support on the underlying graph." 
+ ) + end + + prevsite = N + 1 #keep track of whether we are switching + #to a new site to make sure F string + #is only placed at most once for each site + + # Sort operators in t by site order, + # and keep the permutation used, perm, for analysis below + perm = Vector{Int}(undef, Nt) + sortperm!(perm, ITensors.terms(t); alg=InsertionSort, lt=isless_site) + + t = coefficient(t) * Prod(ITensors.terms(t)[perm]) + + # Identify fermionic operators, + # zeroing perm for bosonic operators, + # and inserting string "F" operators + parity = +1 + for n in Nt:-1:1 + currsite = ITensors.site(t[n]) + fermionic = has_fermion_string( + ITensors.which_op(t[n]), only(sites[ITensors.site(t[n])]) + ) + if !ITensors.using_auto_fermion() && (parity == -1) && (currsite < prevsite) + error("No verified fermion support for automatic TTNO constructor!") # no verified support, just throw error + # Put local piece of Jordan-Wigner string emanating + # from fermionic operators to the right + # (Remaining F operators will be put in by svdMPO) + terms(t)[n] = Op("$(ITensors.which_op(t[n])) * F", only(ITensors.site(t[n]))) + end + prevsite = currsite + + if fermionic + error("No verified fermion support for automatic TTNO constructor!") # no verified support, just throw error + parity = -parity + else + # Ignore bosonic operators in perm + # by zeroing corresponding entries + perm[n] = 0 + end + end + if parity == -1 + error("Parity-odd fermionic terms not yet supported by AutoTTNO") + end + + # Keep only fermionic op positions (non-zero entries) + filter!(!iszero, perm) + # and account for anti-commuting, fermionic operators + # during above sort; put resulting sign into coef + t *= ITensors.parity_sign(perm) + ITensors.terms(os)[n] = t + end + return os +end + +""" + TTNO(os::OpSum, sites::IndsNetwork{<:Index}; kwargs...) + TTNO(eltype::Type{<:Number}, os::OpSum, sites::IndsNetwork{<:Index}; kwargs...) + +Convert an OpSum object `os` to a TreeTensorNetworkOperator, with indices given by `sites`. +""" +function TTNO( + os::OpSum, + sites::IndsNetwork{<:Index}; + root_vertex::Tuple=default_root_vertex(sites), + splitblocks=false, + method::Symbol=:fsm, # default to construction from finite state machine with manual truncation until svdTTNO is fixed + kwargs..., +)::TTNO + length(ITensors.terms(os)) == 0 && error("OpSum has no terms") + is_tree(sites) || error("Site index graph must be a tree.") + is_leaf(sites, root_vertex) || error("Tree root must be a leaf vertex.") + + os = deepcopy(os) + os = sorteachterm(os, sites, root_vertex) + os = ITensors.sortmergeterms(os) # not exported + + if hasqns(first(first(vertex_data(sites)))) + error("No verified quantum number support for automatic TTNO constructor!") # no verified support, just throw error + end + if method == :svd + @warn "Symbolic SVD compression not working for long-range interactions." # add warning until this is fixed + T = svdTTNO(os, sites, root_vertex; kwargs...) + elseif method == :fsm + T = fsmTTNO(os, sites, root_vertex) + # see https://github.com/ITensor/ITensors.jl/issues/526 + lognormT = lognorm(T) + T /= exp(lognormT / nv(T)) # TODO: fix broadcasting for in-place assignment + truncate!(T; root_vertex, cutoff=1e-15) + T *= exp(lognormT / nv(T)) + end + if splitblocks + error("splitblocks not yet implemented for AbstractTreeTensorNetwork.") + T = ITensors.splitblocks(linkinds, T) # TODO: make this work + end + return T +end + +# Conversion from other formats +function TTNO(o::Op, s::IndsNetwork{<:Index}; kwargs...) 
+  return TTNO(OpSum{Float64}() + o, s; kwargs...)
+end
+
+function TTNO(o::Scaled{C,Op}, s::IndsNetwork{<:Index}; kwargs...) where {C}
+  return TTNO(OpSum{C}() + o, s; kwargs...)
+end
+
+function TTNO(o::Sum{Op}, s::IndsNetwork{<:Index}; kwargs...)
+  return TTNO(OpSum{Float64}() + o, s; kwargs...)
+end
+
+function TTNO(o::Prod{Op}, s::IndsNetwork{<:Index}; kwargs...)
+  return TTNO(OpSum{Float64}() + o, s; kwargs...)
+end
+
+function TTNO(o::Scaled{C,Prod{Op}}, s::IndsNetwork{<:Index}; kwargs...) where {C}
+  return TTNO(OpSum{C}() + o, s; kwargs...)
+end
+
+function TTNO(o::Sum{Scaled{C,Op}}, s::IndsNetwork{<:Index}; kwargs...) where {C}
+  return TTNO(OpSum{C}() + o, s; kwargs...)
+end
+
+# Catch-all for leaf eltype specification
+function TTNO(eltype::Type{<:Number}, os, sites::IndsNetwork{<:Index}; kwargs...)
+  return NDTensors.convert_scalartype(eltype, TTNO(os, sites; kwargs...))
+end
diff --git a/src/treetensornetwork/projttno.jl b/src/treetensornetwork/projttno.jl
new file mode 100644
index 00000000..83cc9282
--- /dev/null
+++ b/src/treetensornetwork/projttno.jl
@@ -0,0 +1,52 @@
+"""
+ProjTTNO
+"""
+mutable struct ProjTTNO <: AbstractProjTTNO
+  pos::Union{Vector{<:Tuple},NamedDimEdge{Tuple}} # TODO: cleanest way to specify effective Hamiltonian position?
+  H::TTNO
+  environments::Dictionary{NamedDimEdge{Tuple},ITensor}
+end
+
+function ProjTTNO(H::TTNO)
+  return ProjTTNO(vertices(H), H, Dictionary{edgetype(H),ITensor}())
+end
+
+copy(P::ProjTTNO) = ProjTTNO(P.pos, copy(P.H), copy(P.environments))
+
+# trivial if we choose to specify position as above; only kept to allow using alongside
+# ProjMPO
+function set_nsite!(P::ProjTTNO, nsite)
+  return P
+end
+
+function make_environment!(P::ProjTTNO, psi::TTNS, e::NamedDimEdge{Tuple})::ITensor
+  # invalidate environment for opposite edge direction if necessary
+  reverse(e) ∈ incident_edges(P) || unset!(P.environments, reverse(e))
+  # do nothing if valid environment already present
+  if haskey(P.environments, e)
+    env = environment(P, e)
+  else
+    if is_leaf(underlying_graph(P), src(e))
+      # leaves are easy
+      env = psi[src(e)] * P.H[src(e)] * dag(prime(psi[src(e)]))
+    else
+      # construct by contracting neighbors
+      neighbor_envs = ITensor[]
+      for n in setdiff(neighbors(underlying_graph(P), src(e)), [dst(e)])
+        push!(neighbor_envs, make_environment!(P, psi, edgetype(P)(n, src(e))))
+      end
+      # manual heuristic for contraction order: two environments, site tensors, then
+      # other environments
+      frst, scnd, rst = _separate_first_two(neighbor_envs)
+      itensor_map = vcat(psi[src(e)], frst, scnd, P.H[src(e)], dag(prime(psi[src(e)])), rst)
+      # TODO: actually use optimal contraction sequence here
+      env = reduce(*, itensor_map)
+    end
+    # cache
+    set!(P.environments, e, env)
+  end
+  @assert(
+    hascommoninds(environment(P, e), psi[src(e)]),
+    "Something went wrong, probably re-orthogonalized this edge in the same direction twice!"
+  )
+  return env
+end
diff --git a/src/treetensornetwork/projttnosum.jl b/src/treetensornetwork/projttnosum.jl
new file mode 100644
index 00000000..9a1ec26a
--- /dev/null
+++ b/src/treetensornetwork/projttnosum.jl
@@ -0,0 +1,61 @@
+"""
+ProjTTNOSum
+"""
+mutable struct ProjTTNOSum
+  pm::Vector{ProjTTNO}
+end
+
+copy(P::ProjTTNOSum) = ProjTTNOSum(copy.(P.pm))
+
+ProjTTNOSum(ttnos::Vector{TTNO}) = ProjTTNOSum([ProjTTNO(M) for M in ttnos])
+
+ProjTTNOSum(Ms::TTNO...)
= ProjTTNOSum([Ms...]) + +on_edge(P::ProjTTNOSum) = on_edge(P.pm[1]) + +nsite(P::ProjTTNOSum) = nsite(P.pm[1]) + +function set_nsite!(Ps::ProjTTNOSum, nsite) + for P in Ps.pm + set_nsite!(P, nsite) + end + return Ps +end + +underlying_graph(P::ProjTTNOSum) = underlying_graph(P.pm[1]) + +Base.length(P::ProjTTNOSum) = length(P.pm[1]) + +sites(P::ProjTTNOSum) = sites(P.pm[1]) + +incident_edges(P::ProjTTNOSum) = incident_edges(P.pm[1]) + +internal_edges(P::ProjTTNOSum) = internal_edges(P.pm[1]) + +function product(P::ProjTTNOSum, v::ITensor)::ITensor + Pv = product(P.pm[1], v) + for n in 2:length(P.pm) + Pv += product(P.pm[n], v) + end + return Pv +end + +function Base.eltype(P::ProjTTNOSum) + elT = eltype(P.pm[1]) + for n in 2:length(P.pm) + elT = promote_type(elT, eltype(P.pm[n])) + end + return elT +end + +(P::ProjTTNOSum)(v::ITensor) = product(P, v) + +Base.size(P::ProjTTNOSum) = size(P.pm[1]) + +function position!( + P::ProjTTNOSum, psi::TTNS, pos::Union{Vector{<:Tuple},NamedDimEdge{Tuple}} +) + for M in P.pm + position!(M, psi, pos) + end +end diff --git a/src/treetensornetwork/treetensornetwork.jl b/src/treetensornetwork/treetensornetwork.jl deleted file mode 100644 index 11bfd16b..00000000 --- a/src/treetensornetwork/treetensornetwork.jl +++ /dev/null @@ -1,86 +0,0 @@ -abstract type AbstractTreeTensorNetwork <: AbstractITensorNetwork end - -function default_root_vertex(ϕ::AbstractTreeTensorNetwork, ψ::AbstractTreeTensorNetwork) - return first(vertices(ψ)) -end - -function inner( - ϕ::AbstractTreeTensorNetwork, - ψ::AbstractTreeTensorNetwork; - root_vertex=default_root_vertex(ϕ, ψ), -) - ϕᴴ = sim(dag(ψ); sites=[]) - ψ = sim(ψ; sites=[]) - ϕψ = ϕᴴ ⊗ ψ - # TODO: find the largest tensor and use it as - # the `root_vertex`. - root_vertex = first(vertices(ψ)) - for e in post_order_dfs_edges(ψ, root_vertex) - if has_vertex(ϕψ, 2, src(e)...) - ϕψ = contract(ϕψ, (2, src(e)...) => (1, src(e)...)) - end - ϕψ = contract(ϕψ, (1, src(e)...) => (1, dst(e)...)) - if has_vertex(ϕψ, 2, dst(e)...) - ϕψ = contract(ϕψ, (2, dst(e)...) => (1, dst(e)...)) - end - end - return ϕψ[1, root_vertex...][] -end - -function norm(ψ::AbstractTreeTensorNetwork) - return √(abs(real(inner(ψ, ψ)))) -end - -function orthogonalize(ψ::AbstractTreeTensorNetwork, root_vertex...) - for e in post_order_dfs_edges(ψ, root_vertex) - ψ = orthogonalize(ψ, e) - end - return ψ -end - -# For ambiguity error -function orthogonalize(tn::AbstractTreeTensorNetwork, edge::AbstractEdge; kwargs...) - return _orthogonalize_edge(tn, edge; kwargs...) -end - -""" - TreeTensorNetworkState <: AbstractITensorNetwork - -# Fields - -- itensor_network::ITensorNetwork -- ortho_lims::Vector{Tuple}: A vector of vertices defining the orthogonality limits. - -""" -struct TreeTensorNetworkState <: AbstractTreeTensorNetwork - itensor_network::ITensorNetwork - ortho_center::Vector{Tuple} - function TreeTensorNetworkState( - itensor_network::ITensorNetwork, ortho_center::Vector{Tuple}=vertices(itensor_network) - ) - @assert is_tree(itensor_network) - return new(itensor_network, ortho_center) - end -end - -function copy(ψ::TreeTensorNetworkState) - return TreeTensorNetworkState(copy(ψ.itensor_network), copy(ψ.ortho_center)) -end - -const TTNS = TreeTensorNetworkState - -# Field access -ITensorNetwork(ψ::TreeTensorNetworkState) = ψ.itensor_network - -# Constructor -function TreeTensorNetworkState(inds_network::IndsNetwork, args...; kwargs...) - return TreeTensorNetworkState(ITensorNetwork(inds_network; kwargs...), args...) 
-end - -function TreeTensorNetworkState(graph::AbstractGraph, args...; kwargs...) - itensor_network = ITensorNetwork(graph; kwargs...) - return TreeTensorNetworkState(itensor_network, args...) -end - -# Required for `AbstractITensorNetwork` interface -data_graph(ψ::TreeTensorNetworkState) = data_graph(ITensorNetwork(ψ)) diff --git a/src/treetensornetwork/ttno.jl b/src/treetensornetwork/ttno.jl new file mode 100644 index 00000000..f8029fea --- /dev/null +++ b/src/treetensornetwork/ttno.jl @@ -0,0 +1,148 @@ +""" + TreeTensorNetworkOperator <: AbstractITensorNetwork + +A finite size tree tensor network operator type. +Keeps track of the orthogonality center. + +# Fields + +- itensor_network::ITensorNetwork +- ortho_lims::Vector{Tuple}: A vector of vertices defining the orthogonality limits. +""" +mutable struct TreeTensorNetworkOperator <: AbstractTreeTensorNetwork + itensor_network::ITensorNetwork + ortho_center::Vector{Tuple} + function TreeTensorNetworkOperator( + itensor_network::ITensorNetwork, ortho_center::Vector{<:Tuple}=vertices(itensor_network) + ) + @assert is_tree(itensor_network) + return new(itensor_network, ortho_center) + end +end + +function copy(ψ::TreeTensorNetworkOperator) + return TreeTensorNetworkOperator(copy(ψ.itensor_network), copy(ψ.ortho_center)) +end + +const TTNO = TreeTensorNetworkOperator + +# Field access +ITensorNetwork(ψ::TreeTensorNetworkOperator) = ψ.itensor_network + +# Required for `AbstractITensorNetwork` interface +data_graph(ψ::TreeTensorNetworkOperator) = data_graph(ITensorNetwork(ψ)) + +# +# Constructor +# + +# catch-all for default ElType +function TreeTensorNetworkOperator(graph::AbstractGraph, args...; kwargs...) + return TreeTensorNetworkOperator(Float64, graph, args...; kwargs...) +end + +function TreeTensorNetworkOperator( + ::Type{ElT}, graph::AbstractGraph, args...; kwargs... +) where {ElT<:Number} + itensor_network = ITensorNetwork(ElT, graph; kwargs...) + return TreeTensorNetworkOperator(itensor_network, args...) +end + +# +# Expectation values +# + +# TODO: implement using multi-graph disjoint union +function inner(y::TTNS, A::TTNO, x::TTNS; root_vertex=default_root_vertex(x, A, y)) + traversal_order = reverse(post_order_dfs_vertices(x, root_vertex)) + check_hascommoninds(siteinds, A, x) + check_hascommoninds(siteinds, A, y) + ydag = sim(dag(y); sites=[]) + x = sim(x; sites=[]) + O = ydag[root_vertex] * A[root_vertex] * x[root_vertex] + for v in traversal_order[2:end] + O = O * ydag[v] * A[v] * x[v] + end + return O[] +end + +# TODO: implement using multi-graph disjoint +function inner( + B::TTNO, y::TTNS, A::TTNO, x::TTNS; root_vertex=default_root_vertex(B, y, A, x) +) + N = nv(B) + if nv(y) != N || nv(x) != N || nv(A) != N + throw( + DimensionMismatch( + "inner: mismatched number of vertices $N and $(nv(x)) or $(nv(y)) or $(nv(A))" + ), + ) + end + check_hascommoninds(siteinds, A, x) + check_hascommoninds(siteinds, B, y) + for v in vertices(B) + !hascommoninds( + uniqueinds(siteinds(A, v), siteinds(x, v)), uniqueinds(siteinds(B, v), siteinds(y, v)) + ) && error( + "$(typeof(x)) Ax and $(typeof(y)) By must share site indices. 
On site $v, Ax has site indices $(uniqueinds(siteinds(A, v), (siteinds(x, v)))) while By has site indices $(uniqueinds(siteinds(B, v), siteinds(y, v))).", + ) + end + ydag = sim(linkinds, dag(y)) + Bdag = sim(linkinds, dag(B)) + traversal_order = reverse(post_order_dfs_vertices(x, root_vertex)) + yB = ydag[root_vertex] * Bdag[root_vertex] + Ax = A[root_vertex] * x[root_vertex] + O = yB * Ax + for v in traversal_order[2:end] + yB = ydag[v] * Bdag[v] + Ax = A[v] * x[v] + yB *= O + O = yB * Ax + end + return O[] +end + +# +# Construction from operator (map) +# + +function TTNO( + ::Type{ElT}, sites::IndsNetwork, ops::Dictionary; kwargs... +) where {ElT<:Number} + N = nv(sites) + os = Prod{Op}() + for v in vertices(sites) + os *= Op(ops[v], v) + end + T = TTNO(ElT, os, sites; kwargs...) + # see https://github.com/ITensor/ITensors.jl/issues/526 + lognormT = lognorm(T) + T /= exp(lognormT / N) # TODO: fix broadcasting for in-place assignment + truncate!(T; cutoff=1e-15) + T *= exp(lognormT / N) + return T +end + +function TTNO( + ::Type{ElT}, sites::IndsNetwork, fops::Function; kwargs... +) where {ElT<:Number} + ops = Dictionary(vertices(sites), map(v -> fops(v), vertices(sites))) + return TTNO(ElT, sites, ops; kwargs...) +end + +function TTNO(::Type{ElT}, sites::IndsNetwork, op::String; kwargs...) where {ElT<:Number} + ops = Dictionary(vertices(sites), fill(op, nv(sites))) + return TTNO(ElT, sites, ops; kwargs...) +end + +# +# Conversion +# + +function convert(::Type{TTNS}, T::TTNO) + return TTNS(ITensorNetwork(T), ortho_center(T)) +end + +function convert(::Type{TTNO}, T::TTNS) + return TTNO(ITensorNetwork(T), ortho_center(T)) +end diff --git a/src/treetensornetwork/ttns.jl b/src/treetensornetwork/ttns.jl new file mode 100644 index 00000000..0068054c --- /dev/null +++ b/src/treetensornetwork/ttns.jl @@ -0,0 +1,128 @@ +""" + TreeTensorNetworkState <: AbstractITensorNetwork + +# Fields + +- itensor_network::ITensorNetwork +- ortho_lims::Vector{Tuple}: A vector of vertices defining the orthogonality limits. + +""" +mutable struct TreeTensorNetworkState <: AbstractTreeTensorNetwork + itensor_network::ITensorNetwork + ortho_center::Vector{Tuple} + function TreeTensorNetworkState( + itensor_network::ITensorNetwork, ortho_center::Vector{<:Tuple}=vertices(itensor_network) + ) + @assert is_tree(itensor_network) + return new(itensor_network, ortho_center) + end +end + +function copy(ψ::TreeTensorNetworkState) + return TreeTensorNetworkState(copy(ψ.itensor_network), copy(ψ.ortho_center)) +end + +const TTNS = TreeTensorNetworkState + +# Field access +ITensorNetwork(ψ::TreeTensorNetworkState) = ψ.itensor_network + +# Required for `AbstractITensorNetwork` interface +data_graph(ψ::TreeTensorNetworkState) = data_graph(ITensorNetwork(ψ)) + +# +# Constructor +# + +# catch-all for default ElType +function TreeTensorNetworkState(g::AbstractGraph, args...; kwargs...) + return TreeTensorNetworkState(Float64, g, args...; kwargs...) +end + +# can defer almost everything to ITensorNework constructor +function TreeTensorNetworkState( + ::Type{ElT}, graph::AbstractGraph, args...; kwargs... +) where {ElT<:Number} + itensor_network = ITensorNetwork(ElT, graph; kwargs...) + return TreeTensorNetworkState(itensor_network, args...) +end + +# construct from given state (map) +function TreeTensorNetworkState( + ::Type{ElT}, is::IndsNetwork, states, args... +) where {ElT<:Number} + itensor_network = ITensorNetwork(ElT, is, states) + return TreeTensorNetworkState(itensor_network, args...) +end + +# TODO: randomcircuitTTNS? 
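+# Usage sketch for the constructors above. This is only an illustration:
+# `named_comb_tree` and `siteinds` are assumed helpers (as used in the tests) and the
+# state map is hypothetical; see `randomTTNS`/`productTTNS` below for randomized and
+# product-state fillings.
+#
+#   s = siteinds("S=1/2", named_comb_tree((3, 2)))
+#   ψ = TTNS(ComplexF64, s, v -> isodd(sum(v)) ? "Up" : "Dn")
+#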
+function randomTTNS(args...; kwargs...)
+  T = TTNS(args...; kwargs...)
+  randn!.(vertex_data(T))
+  normalize!.(vertex_data(T))
+  return T
+end
+
+function productTTNS(args...; kwargs...)
+  return TTNS(args...; link_space=1, kwargs...)
+end
+
+#
+# Utility
+#
+
+function replacebond!(T::TTNS, edge::AbstractEdge, phi::ITensor; kwargs...)
+  ortho::String = get(kwargs, :ortho, "left")
+  swapsites::Bool = get(kwargs, :swapsites, false)
+  which_decomp::Union{String,Nothing} = get(kwargs, :which_decomp, nothing)
+  normalize::Bool = get(kwargs, :normalize, false)
+
+  indsTe = inds(T[src(edge)])
+  if swapsites
+    sb = siteinds(T, src(edge))
+    sbp1 = siteinds(T, dst(edge))
+    indsTe = replaceinds(indsTe, sb, sbp1)
+  end
+
+  L, R, spec = factorize(
+    phi, indsTe; which_decomp=which_decomp, tags=tags(T, edge), kwargs...
+  )
+
+  T[src(edge)] = L
+  T[dst(edge)] = R
+  if ortho == "left"
+    normalize && (T[dst(edge)] ./= norm(T[dst(edge)]))
+    isortho(T) && set_ortho_center!(T, [dst(edge)])
+  elseif ortho == "right"
+    normalize && (T[src(edge)] ./= norm(T[src(edge)]))
+    isortho(T) && set_ortho_center!(T, [src(edge)])
+  end
+  return spec
+end
+
+function replacebond!(T::TTNS, edge::Pair, phi::ITensor; kwargs...)
+  return replacebond!(T, edgetype(T)(edge), phi; kwargs...)
+end
+
+function replacebond(T0::TTNS, args...; kwargs...)
+  return replacebond!(copy(T0), args...; kwargs...)
+end
+
+#
+# Expectation values
+#
+
+# TODO: temporary patch, to be implemented properly
+function expect(psi::TTNS, opname::String; kwargs...)
+  s = siteinds(psi)
+  sites = get(kwargs, :sites, vertices(psi))
+  res = Dictionary(sites, Vector{ComplexF64}(undef, length(sites)))
+  norm2_psi = inner(psi, psi)
+  for v in sites
+    Opsi = copy(psi)
+    Opsi[v] *= op(opname, s[v])
+    noprime!(Opsi[v])
+    res[v] = inner(psi, Opsi) / norm2_psi
+  end
+  return res
+end
diff --git a/src/utility.jl b/src/utility.jl
new file mode 100644
index 00000000..5d155f64
--- /dev/null
+++ b/src/utility.jl
@@ -0,0 +1,17 @@
+"""
+Relabel sites in OpSum according to given site map
+"""
+function relabel_sites(O::OpSum, vmap::AbstractDictionary)
+  Oout = OpSum()
+  for term in Ops.terms(O)
+    c = Ops.coefficient(term)
+    p = Ops.argument(term)
+    # swap sites for every Op in product and multiply resulting Ops
+    pout = prod([
+      Op(Ops.which_op(o), map(v -> vmap[v], Ops.sites(o))...; Ops.params(o)...)
for o in p + ]) + # add to new OpSum + Oout += c * pout + end + return Oout +end diff --git a/test/Manifest.toml b/test/Manifest.toml index bdde109c..ef24cbb4 100644 --- a/test/Manifest.toml +++ b/test/Manifest.toml @@ -1,7 +1,8 @@ # This file is machine-generated - editing it directly is not advised -julia_version = "1.7.2" +julia_version = "1.8.0-rc3" manifest_format = "2.0" +project_hash = "e7d2e1ca582bfd915f1ed57071b99f80ba7b64a0" [[deps.AbstractTrees]] git-tree-sha1 = "03e0550477d86222521d254b741d470ba17ea0b5" @@ -10,12 +11,13 @@ version = "0.3.4" [[deps.Adapt]] deps = ["LinearAlgebra"] -git-tree-sha1 = "af92965fb30777147966f58acb05da51c5616b5f" +git-tree-sha1 = "195c5505521008abea5aee4f96930717958eac6f" uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "3.3.3" +version = "3.4.0" [[deps.ArgTools]] uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" +version = "1.1.1" [[deps.ArnoldiMethod]] deps = ["LinearAlgebra", "Random", "StaticArrays"] @@ -29,39 +31,69 @@ uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" [[deps.Base64]] uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" -[[deps.Blosc]] -deps = ["Blosc_jll"] -git-tree-sha1 = "575bdd70552dd9a7eaeba08ef2533226cdc50779" -uuid = "a74b3585-a348-5f62-a45c-50e91977d574" -version = "0.7.2" +[[deps.BitIntegers]] +deps = ["Random"] +git-tree-sha1 = "5a814467bda636f3dde5c4ef83c30dd0a19928e0" +uuid = "c3b6d118-76ef-56ca-8cc7-ebb389d030a1" +version = "0.2.6" + +[[deps.Bzip2_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "19a35467a82e236ff51bc17a3a44b69ef35185a2" +uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0" +version = "1.0.8+0" -[[deps.Blosc_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Lz4_jll", "Pkg", "Zlib_jll", "Zstd_jll"] -git-tree-sha1 = "91d6baa911283650df649d0aea7c28639273ae7b" -uuid = "0b7ba130-8d10-5ba8-a3d6-c5182647fed9" -version = "1.21.1+0" +[[deps.CEnum]] +git-tree-sha1 = "eb4cb44a499229b3b8426dcfb5dd85333951ff90" +uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82" +version = "0.4.2" [[deps.ChainRulesCore]] deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "6e39c91fb4b84dcb870813c91674bdebb9145895" +git-tree-sha1 = "e7ff6cadf743c098e08fca25c91103ee4303c9bb" uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.11.5" +version = "1.15.6" [[deps.ChangesOfVariables]] deps = ["ChainRulesCore", "LinearAlgebra", "Test"] -git-tree-sha1 = "bf98fa45a0a4cee295de98d4c1462be26345b9a1" +git-tree-sha1 = "38f7a08f19d8810338d4f5085211c7dfa5d5bdd8" uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" -version = "0.1.2" +version = "0.1.4" + +[[deps.ColorTypes]] +deps = ["FixedPointNumbers", "Random"] +git-tree-sha1 = "eb7f0f8307f71fac7c606984ea5fb2817275d6e4" +uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f" +version = "0.11.4" + +[[deps.ColorVectorSpace]] +deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "SpecialFunctions", "Statistics", "TensorCore"] +git-tree-sha1 = "d08c20eef1f2cbc6e60fd3612ac4340b89fea322" +uuid = "c3611d14-8923-5661-9e6a-0046d554d3a4" +version = "0.9.9" + +[[deps.Colors]] +deps = ["ColorTypes", "FixedPointNumbers", "Reexport"] +git-tree-sha1 = "417b0ed7b8b838aa6ca0a87aadf1bb9eb111ce40" +uuid = "5ae59095-9a9b-59fe-a467-6f913c188581" +version = "0.12.8" [[deps.Compat]] deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = 
"44c37b4636bc54afac5c574d2d02b625349d6582" +git-tree-sha1 = "78bee250c6826e1cf805a88b7f1e86025275d208" uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "3.41.0" +version = "3.46.0" [[deps.CompilerSupportLibraries_jll]] deps = ["Artifacts", "Libdl"] uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" +version = "0.5.2+0" + +[[deps.ConstructionBase]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "fb21ddd70a051d882a1686a5a550990bbe371a95" +uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9" +version = "1.4.1" [[deps.Contour]] deps = ["StaticArrays"] @@ -70,20 +102,20 @@ uuid = "d38c429a-6771-53c6-b99e-75d170b6e991" version = "0.5.7" [[deps.Crayons]] -git-tree-sha1 = "b618084b49e78985ffa8422f32b9838e397b9fc2" +git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15" uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f" -version = "4.1.0" +version = "4.1.1" [[deps.DataAPI]] -git-tree-sha1 = "cc70b17275652eb47bc9e5f81635981f13cea5c8" +git-tree-sha1 = "46d2680e618f8abd007bce0c3026cb0c4a8f2032" uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" -version = "1.9.0" +version = "1.12.0" [[deps.DataStructures]] deps = ["Compat", "InteractiveUtils", "OrderedCollections"] -git-tree-sha1 = "3daef5523dd2e769dad2365274f760ff5f282c7d" +git-tree-sha1 = "d1fff3a548102f48987a52a2e0d114fa97d730f0" uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.18.11" +version = "0.18.13" [[deps.DataValueInterfaces]] git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" @@ -99,10 +131,10 @@ deps = ["Mmap"] uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" [[deps.Dictionaries]] -deps = ["Indexing", "Random"] -git-tree-sha1 = "66bde31636301f4d217a161cabe42536fa754ec8" +deps = ["Indexing", "Random", "Serialization"] +git-tree-sha1 = "96dc5c5c8994be519ee3420953c931c55657a3f2" uuid = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" -version = "0.3.17" +version = "0.3.24" [[deps.Distributed]] deps = ["Random", "Serialization", "Sockets"] @@ -110,80 +142,117 @@ uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[deps.DocStringExtensions]] deps = ["LibGit2"] -git-tree-sha1 = "b19534d1895d702889b219c382a6e18010797f0b" +git-tree-sha1 = "5158c2b41018c5f7eb1470d558127ac274eca0c9" uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.8.6" +version = "0.9.1" [[deps.Downloads]] -deps = ["ArgTools", "LibCURL", "NetworkOptions"] +deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" +version = "1.6.0" [[deps.EarCut_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "3f3a2501fa7236e9b911e0f7a588c657e822bb6d" +git-tree-sha1 = "e3290f2d49e661fbd94046d7e3726ffcb2d41053" uuid = "5ae413db-bbd1-5e63-b57d-d24a61df00f5" -version = "2.2.3+0" +version = "2.2.4+0" [[deps.ExprTools]] -git-tree-sha1 = "24565044e60bc48a7562e75bcf14f084901dc0b6" +git-tree-sha1 = "56559bbef6ca5ea0c0818fa5c90320398a6fbf8d" uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04" -version = "0.1.7" +version = "0.1.8" + +[[deps.Extents]] +git-tree-sha1 = "5e1e4c53fa39afe63a7d356e30452249365fba99" +uuid = "411431e0-e8b7-467b-b5e0-f676ba4f2910" +version = "0.1.1" [[deps.FileIO]] deps = ["Pkg", "Requires", "UUIDs"] -git-tree-sha1 = "67551df041955cc6ee2ed098718c8fcd7fc7aebe" +git-tree-sha1 = "94f5101b96d2d968ace56f7f2db19d0a5f592e28" uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" -version = "1.12.0" +version = "1.15.0" + +[[deps.FileWatching]] +uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" + +[[deps.FixedPointNumbers]] +deps = ["Statistics"] +git-tree-sha1 = "335bfdceacc84c5cdf16aadc768aa5ddfc5383cc" +uuid = 
"53c48c17-4a7d-5ca2-90c5-79b7896eea93" +version = "0.8.4" + +[[deps.FreeType]] +deps = ["CEnum", "FreeType2_jll"] +git-tree-sha1 = "cabd77ab6a6fdff49bfd24af2ebe76e6e018a2b4" +uuid = "b38be410-82b0-50bf-ab77-7b57e271db43" +version = "4.0.0" + +[[deps.FreeType2_jll]] +deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "87eb71354d8ec1a96d4a7636bd57a7347dde3ef9" +uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7" +version = "2.10.4+0" + +[[deps.FreeTypeAbstraction]] +deps = ["ColorVectorSpace", "Colors", "FreeType", "GeometryBasics"] +git-tree-sha1 = "b5c7fe9cea653443736d264b85466bad8c574f4a" +uuid = "663a7486-cb36-511b-a19d-713bb74d65c9" +version = "0.9.9" + +[[deps.Functors]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "a2657dd0f3e8a61dbe70fc7c122038bd33790af5" +uuid = "d9f16b24-f501-4c13-a1f2-28368ffc5196" +version = "0.3.0" + +[[deps.GeoInterface]] +deps = ["Extents"] +git-tree-sha1 = "fb28b5dc239d0174d7297310ef7b84a11804dfab" +uuid = "cf35fbd7-0cd7-5166-be24-54bfbe79505f" +version = "1.0.1" [[deps.GeometryBasics]] -deps = ["EarCut_jll", "IterTools", "LinearAlgebra", "StaticArrays", "StructArrays", "Tables"] -git-tree-sha1 = "58bcdf5ebc057b085e58d95c138725628dd7453c" +deps = ["EarCut_jll", "GeoInterface", "IterTools", "LinearAlgebra", "StaticArrays", "StructArrays", "Tables"] +git-tree-sha1 = "12a584db96f1d460421d5fb8860822971cdb8455" uuid = "5c1252a2-5f33-56bf-86c9-59e7332b4326" -version = "0.4.1" +version = "0.4.4" [[deps.Graphs]] deps = ["ArnoldiMethod", "Compat", "DataStructures", "Distributed", "Inflate", "LinearAlgebra", "Random", "SharedArrays", "SimpleTraits", "SparseArrays", "Statistics"] -git-tree-sha1 = "d727758173afef0af878b29ac364a0eca299fc6b" +git-tree-sha1 = "ba2d094a88b6b287bd25cfa86f301e7693ffae2f" uuid = "86223c79-3864-5bf0-83f7-82e725a168b6" -version = "1.5.1" +version = "1.7.4" [[deps.HDF5]] -deps = ["Blosc", "Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] -git-tree-sha1 = "698c099c6613d7b7f151832868728f426abe698b" +deps = ["Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] +git-tree-sha1 = "899f041bf330ebeead3637073b2ca7477760edde" uuid = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" -version = "0.15.7" +version = "0.16.11" [[deps.HDF5_jll]] deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenSSL_jll", "Pkg", "Zlib_jll"] -git-tree-sha1 = "bab67c0d1c4662d2c4be8c6007751b0b6111de5c" +git-tree-sha1 = "4cc2bb72df6ff40b055295fdef6d92955f9dede8" uuid = "0234f1f7-429e-5d53-9886-15a909be8d59" -version = "1.12.1+0" - -[[deps.ITensorNetworks]] -deps = ["ITensors"] -git-tree-sha1 = "aa46404f09d7107f40cc068b2b2dfcd947f12b48" -repo-rev = "main" -repo-url = "https://github.com/mtfishman/ITensorNetworks.jl" -uuid = "2919e153-833c-4bdc-8836-1ea460a35fc7" -version = "0.1.0" +version = "1.12.2+2" [[deps.ITensorUnicodePlots]] -deps = ["Graphs", "ITensorVisualizationBase", "ITensors", "LinearAlgebra", "NetworkLayout", "Reexport", "Statistics", "UnicodePlots"] -git-tree-sha1 = "fd73ee12c4ecf442360719b9ecf02b28c1500657" +deps = ["Graphs", "ITensorVisualizationBase", "LinearAlgebra", "NetworkLayout", "Reexport", "Statistics", "UnicodePlots"] +git-tree-sha1 = "40ea02ab983dba9ece488433f55cf58a84b1d874" uuid = "73163f41-4a9e-479f-8353-73bf94dbd758" -version = "0.1.0" +version = "0.1.2" [[deps.ITensorVisualizationBase]] deps = ["AbstractTrees", "Compat", "GeometryBasics", "Graphs", "ITensors", "LinearAlgebra", "MetaGraphs", "NetworkLayout", "SparseArrays", "Statistics"] -git-tree-sha1 = "fb3d48ecaff23a857b2525fed73217d737f92811" 
+git-tree-sha1 = "f605699ee003bc0bb4f5bb83bc6b3f13f66eb00b" uuid = "cd2553d2-8bef-4d93-8a38-c62f17d5ad23" -version = "0.1.0" +version = "0.1.4" [[deps.ITensors]] -deps = ["ChainRulesCore", "Compat", "Dictionaries", "HDF5", "KrylovKit", "LinearAlgebra", "LinearMaps", "NDTensors", "PackageCompiler", "Pkg", "Printf", "Random", "Requires", "SerializedElementArrays", "StaticArrays", "Strided", "TimerOutputs", "TupleTools", "Zeros", "ZygoteRules"] -git-tree-sha1 = "8e99d15c467e62cc290e10438c7ebb05052aac74" +deps = ["Adapt", "BitIntegers", "ChainRulesCore", "Compat", "Dictionaries", "Functors", "HDF5", "IsApprox", "KrylovKit", "LinearAlgebra", "LinearMaps", "NDTensors", "PackageCompiler", "Pkg", "Printf", "Random", "Requires", "SerializedElementArrays", "SimpleTraits", "StaticArrays", "Strided", "TimerOutputs", "TupleTools", "Zeros", "ZygoteRules"] +git-tree-sha1 = "abf22bd129d73c0e46eebd82af1865e66cbbe437" uuid = "9136182c-28ba-11e9-034c-db9fb085ebd5" -version = "0.2.12" +version = "0.3.20" [[deps.Indexing]] git-tree-sha1 = "ce1566720fd6b19ff3411404d4b977acd4814f9f" @@ -191,9 +260,9 @@ uuid = "313cdc1a-70c2-5d6a-ae34-0150d3930a38" version = "1.1.1" [[deps.Inflate]] -git-tree-sha1 = "f5fc07d4e706b84f72d54eedcc1c13d92fb0871c" +git-tree-sha1 = "5cd07aab533df5170988219191dfad0519391428" uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9" -version = "0.1.2" +version = "0.1.3" [[deps.InteractiveUtils]] deps = ["Markdown"] @@ -201,15 +270,21 @@ uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" [[deps.InverseFunctions]] deps = ["Test"] -git-tree-sha1 = "a7254c0acd8e62f1ac75ad24d5db43f5f19f3c65" +git-tree-sha1 = "49510dfcb407e572524ba94aeae2fced1f3feb0f" uuid = "3587e190-3f89-42d0-90ee-14403ec27112" -version = "0.1.2" +version = "0.1.8" [[deps.IrrationalConstants]] git-tree-sha1 = "7fd44fd4ff43fc60815f8e764c0f352b83c49151" uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" version = "0.1.1" +[[deps.IsApprox]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "7627fa9b4c822a2b3ab8d8d39639e0a889a9758e" +uuid = "28f27b66-4bd8-47e7-9110-e2746eb8bed7" +version = "0.1.5" + [[deps.IterTools]] git-tree-sha1 = "fa6287a4469f5e048d763df38279ee729fbd44e5" uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e" @@ -221,34 +296,41 @@ uuid = "82899510-4779-5014-852e-03e436cf321d" version = "1.0.0" [[deps.JLD2]] -deps = ["DataStructures", "FileIO", "MacroTools", "Mmap", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] -git-tree-sha1 = "09ef0c32a26f80b465d808a1ba1e85775a282c97" +deps = ["FileIO", "MacroTools", "Mmap", "OrderedCollections", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] +git-tree-sha1 = "0d0ad913e827d13c5e88a73f9333d7e33c424576" uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" -version = "0.4.17" +version = "0.4.24" [[deps.JLLWrappers]] deps = ["Preferences"] -git-tree-sha1 = "22df5b96feef82434b07327e2d3c770a9b21e023" +git-tree-sha1 = "abc9885a7ca2052a736a600f7fa66209f96506e1" uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" -version = "1.4.0" +version = "1.4.1" [[deps.KrylovKit]] deps = ["LinearAlgebra", "Printf"] -git-tree-sha1 = "0328ad9966ae29ccefb4e1b9bfd8c8867e4360df" +git-tree-sha1 = "49b0c1dd5c292870577b8f58c51072bd558febb9" uuid = "0b1a1467-8014-51b9-945f-bf0ae24f4b77" -version = "0.5.3" +version = "0.5.4" [[deps.LazyArtifacts]] deps = ["Artifacts", "Pkg"] uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3" +[[deps.LazyModules]] +git-tree-sha1 = "a560dd966b386ac9ae60bdd3a3d3a326062d3c3e" +uuid = "8cdb02fc-e678-4876-92c5-9defec4f444e" +version = "0.3.1" + [[deps.LibCURL]] deps = ["LibCURL_jll", "MozillaCACerts_jll"] 
uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" +version = "0.6.3" [[deps.LibCURL_jll]] deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" +version = "7.83.1+1" [[deps.LibGit2]] deps = ["Base64", "NetworkOptions", "Printf", "SHA"] @@ -257,6 +339,7 @@ uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" [[deps.LibSSH2_jll]] deps = ["Artifacts", "Libdl", "MbedTLS_jll"] uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" +version = "1.10.2+0" [[deps.Libdl]] uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" @@ -266,32 +349,32 @@ deps = ["Libdl", "libblastrampoline_jll"] uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" [[deps.LinearMaps]] -deps = ["LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "dbb14c604fc47aa4f2e19d0ebb7b6416f3cfa5f5" +deps = ["LinearAlgebra", "SparseArrays", "Statistics"] +git-tree-sha1 = "d1b46faefb7c2f48fdec69e6f3cc34857769bc15" uuid = "7a12625a-238d-50fd-b39a-03d52299707e" -version = "3.5.1" +version = "3.8.0" [[deps.LogExpFunctions]] deps = ["ChainRulesCore", "ChangesOfVariables", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] -git-tree-sha1 = "e5718a00af0ab9756305a0392832c8952c7426c1" +git-tree-sha1 = "94d9c52ca447e23eac0c0f074effbcd38830deb5" uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -version = "0.3.6" +version = "0.3.18" [[deps.Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" -[[deps.Lz4_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "5d494bc6e85c4c9b626ee0cab05daa4085486ab1" -uuid = "5ced341a-0733-55b8-9ab6-a4889d929147" -version = "1.9.3+0" - [[deps.MacroTools]] deps = ["Markdown", "Random"] git-tree-sha1 = "3d3e902b31198a27340d0bf00d6ac452866021cf" uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" version = "0.5.9" +[[deps.MarchingCubes]] +deps = ["SnoopPrecompile", "StaticArrays"] +git-tree-sha1 = "ffc66942498a5f0d02b9e7b1b1af0f5873142cdc" +uuid = "299715c1-40a9-479a-aaf9-4a633d36f717" +version = "0.1.4" + [[deps.Markdown]] deps = ["Base64"] uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" @@ -299,6 +382,7 @@ uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" [[deps.MbedTLS_jll]] deps = ["Artifacts", "Libdl"] uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" +version = "2.28.0+0" [[deps.MetaGraphs]] deps = ["Graphs", "JLD2", "Random"] @@ -317,12 +401,19 @@ uuid = "a63ad114-7e13-5084-954f-fe012c677804" [[deps.MozillaCACerts_jll]] uuid = "14a3606d-f60d-562e-9121-12d972cd8159" +version = "2022.2.1" [[deps.NDTensors]] -deps = ["Compat", "Dictionaries", "HDF5", "LinearAlgebra", "Random", "Requires", "StaticArrays", "Strided", "TimerOutputs", "TupleTools"] -git-tree-sha1 = "061056112183f05acee6afff89ddf8b77f9c88d8" +deps = ["Adapt", "Compat", "Dictionaries", "Functors", "HDF5", "LinearAlgebra", "Random", "Requires", "SimpleTraits", "StaticArrays", "Strided", "TimerOutputs", "TupleTools"] +git-tree-sha1 = "c04da1fe76f68452be95a1a340b5b806bf7da13f" uuid = "23ae76d9-e61a-49c4-8f12-3f1a16adf9cf" -version = "0.1.33" +version = "0.1.44" + +[[deps.NaNMath]] +deps = ["OpenLibm_jll"] +git-tree-sha1 = "a7c3d1da1189a1c2fe843a3bfa04d18d20eb3211" +uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +version = "1.0.1" [[deps.NetworkLayout]] deps = ["GeometryBasics", "LinearAlgebra", "Random", "Requires", "SparseArrays"] @@ -332,16 +423,29 @@ version = "0.4.4" [[deps.NetworkOptions]] uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +version = "1.2.0" [[deps.OpenBLAS_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] uuid = 
"4536629a-c528-5b80-bd46-f80d51c5b363" +version = "0.3.20+0" + +[[deps.OpenLibm_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "05823500-19ac-5b8b-9628-191a04bc5112" +version = "0.8.1+0" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "648107615c15d4e09f7eca16307bc821c1f718d8" +git-tree-sha1 = "e60321e3f2616584ff98f0a4f18d98ae6f89bbb3" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "1.1.13+0" +version = "1.1.17+0" + +[[deps.OpenSpecFun_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.5+0" [[deps.OrderedCollections]] git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" @@ -349,20 +453,21 @@ uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" version = "1.4.1" [[deps.PackageCompiler]] -deps = ["Artifacts", "LazyArtifacts", "Libdl", "Pkg", "RelocatableFolders", "UUIDs"] -git-tree-sha1 = "a16924b37299cc7d6106fac255b44a8c79c7c21f" +deps = ["Artifacts", "LazyArtifacts", "Libdl", "Pkg", "Printf", "RelocatableFolders", "TOML", "UUIDs"] +git-tree-sha1 = "c497e2bb9c2127a411b74dbff56b11f258d67d12" uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d" -version = "1.7.7" +version = "2.0.9" [[deps.Pkg]] deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.8.0" [[deps.Preferences]] deps = ["TOML"] -git-tree-sha1 = "2cf929d64681236a2e074ffafb8d568733d2e6af" +git-tree-sha1 = "47e5f437cc0e7ef2ce8406ce1e7e24d44915f88d" uuid = "21216c6a-2e73-6563-6e65-726566657250" -version = "1.2.3" +version = "1.3.0" [[deps.Printf]] deps = ["Unicode"] @@ -395,12 +500,13 @@ version = "1.3.0" [[deps.SHA]] uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" +version = "0.7.0" [[deps.Scratch]] deps = ["Dates"] -git-tree-sha1 = "0b4b7f1393cff97c33891da2a0bf69c6ed241fda" +git-tree-sha1 = "f94f779c94e58bf9ea243e77a37e16d9de9126bd" uuid = "6c6a2e73-6563-6170-7368-637461726353" -version = "1.1.0" +version = "1.1.1" [[deps.Serialization]] uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" @@ -421,6 +527,11 @@ git-tree-sha1 = "5d7e3f4e11935503d3ecaf7186eac40602e7d231" uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d" version = "0.9.4" +[[deps.SnoopPrecompile]] +git-tree-sha1 = "f604441450a3c0569830946e5b33b78c928e1a85" +uuid = "66db9d55-30c0-4569-8b51-7e840670fc0c" +version = "1.0.1" + [[deps.Sockets]] uuid = "6462fe0b-24de-5631-8697-dd941f90decc" @@ -434,42 +545,55 @@ version = "1.0.1" deps = ["LinearAlgebra", "Random"] uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +[[deps.SpecialFunctions]] +deps = ["ChainRulesCore", "IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"] +git-tree-sha1 = "d75bda01f8c31ebb72df80a46c88b25d1c79c56d" +uuid = "276daf66-3868-5448-9aa4-cd146d93841b" +version = "2.1.7" + [[deps.StaticArrays]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "2ae4fe21e97cd13efd857462c1869b73c9f61be3" +deps = ["LinearAlgebra", "Random", "StaticArraysCore", "Statistics"] +git-tree-sha1 = "f86b3a049e5d05227b10e15dbb315c5b90f14988" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.3.2" +version = "1.5.9" + +[[deps.StaticArraysCore]] +git-tree-sha1 = "6b7ba252635a5eff6a0b0664a41ee140a1c9e72a" +uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" +version = "1.4.0" [[deps.Statistics]] deps = ["LinearAlgebra", 
"SparseArrays"] uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" [[deps.StatsAPI]] -git-tree-sha1 = "d88665adc9bcf45903013af0982e2fd05ae3d0a6" +deps = ["LinearAlgebra"] +git-tree-sha1 = "f9af7f195fb13589dd2e2d57fdb401717d2eb1f6" uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" -version = "1.2.0" +version = "1.5.0" [[deps.StatsBase]] deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] -git-tree-sha1 = "51383f2d367eb3b444c961d485c565e4c0cf4ba0" +git-tree-sha1 = "d1bf48bfcc554a3761a133fe3a9bb01488e06916" uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.33.14" +version = "0.33.21" [[deps.Strided]] deps = ["LinearAlgebra", "TupleTools"] -git-tree-sha1 = "4d581938087ca90eab9bd4bb6d270edaefd70dcd" +git-tree-sha1 = "a7a664c91104329c88222aa20264e1a05b6ad138" uuid = "5e0ebb24-38b0-5f93-81fe-25c709ecae67" -version = "1.1.2" +version = "1.2.3" [[deps.StructArrays]] -deps = ["Adapt", "DataAPI", "StaticArrays", "Tables"] -git-tree-sha1 = "d21f2c564b21a202f4677c0fba5b5ee431058544" +deps = ["Adapt", "DataAPI", "StaticArraysCore", "Tables"] +git-tree-sha1 = "8c6ac65ec9ab781af05b08ff305ddc727c25f680" uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" -version = "0.6.4" +version = "0.6.12" [[deps.TOML]] deps = ["Dates"] uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" +version = "1.0.0" [[deps.TableTraits]] deps = ["IteratorInterfaceExtensions"] @@ -478,14 +602,21 @@ uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" version = "1.0.1" [[deps.Tables]] -deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "TableTraits", "Test"] -git-tree-sha1 = "bb1064c9a84c52e277f1096cf41434b675cd368b" +deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"] +git-tree-sha1 = "2d7164f7b8a066bcfa6224e67736ce0eb54aef5b" uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" -version = "1.6.1" +version = "1.9.0" [[deps.Tar]] deps = ["ArgTools", "SHA"] uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" +version = "1.10.0" + +[[deps.TensorCore]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "1feb45f88d133a655e001435632f019a9a1bcdb6" +uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50" +version = "0.1.1" [[deps.Test]] deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] @@ -493,15 +624,15 @@ uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [[deps.TimerOutputs]] deps = ["ExprTools", "Printf"] -git-tree-sha1 = "a5aed757f65c8a1c64503bc4035f704d24c749bf" +git-tree-sha1 = "9dfcb767e17b0849d6aaf85997c98a5aea292513" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.14" +version = "0.5.21" [[deps.TranscodingStreams]] deps = ["Random", "Test"] -git-tree-sha1 = "216b95ea110b5972db65aa90f88d8d89dcb8851c" +git-tree-sha1 = "8a75929dcd3c38611db2f8d08546decb514fcadf" uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.9.6" +version = "0.9.9" [[deps.TupleTools]] git-tree-sha1 = "3c712976c47707ff893cf6ba4354aa14db1d8938" @@ -516,10 +647,16 @@ uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" [[deps.UnicodePlots]] -deps = ["Contour", "Crayons", "Dates", "SparseArrays", "StatsBase"] -git-tree-sha1 = "0efb50275f42f2795b771a4ad2c339db9760fd98" +deps = ["ColorTypes", "Contour", "Crayons", "Dates", "FileIO", "FreeTypeAbstraction", "LazyModules", "LinearAlgebra", "MarchingCubes", "NaNMath", "Printf", "SparseArrays", "StaticArrays", "StatsBase", "Unitful"] +git-tree-sha1 = 
"ae67ab0505b9453655f7d5ea65183a1cd1b3cfa0" uuid = "b8865327-cd53-5732-bb35-84acbb429228" -version = "2.6.0" +version = "2.12.4" + +[[deps.Unitful]] +deps = ["ConstructionBase", "Dates", "LinearAlgebra", "Random"] +git-tree-sha1 = "d57a4ed70b6f9ff1da6719f5f2713706d57e0d66" +uuid = "1986cc42-f94f-5a68-af5c-568840ba703d" +version = "1.12.0" [[deps.Zeros]] deps = ["Test"] @@ -530,12 +667,7 @@ version = "0.3.0" [[deps.Zlib_jll]] deps = ["Libdl"] uuid = "83775a58-1f1d-513f-b197-d71354ab007a" - -[[deps.Zstd_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "cc4bf3fdde8b7e3e9fa0351bdeedba1cf3b7f6e6" -uuid = "3161d3a3-bdf6-5164-811a-617609db77b4" -version = "1.5.0+0" +version = "1.2.12+3" [[deps.ZygoteRules]] deps = ["MacroTools"] @@ -546,11 +678,14 @@ version = "0.2.2" [[deps.libblastrampoline_jll]] deps = ["Artifacts", "Libdl", "OpenBLAS_jll"] uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" +version = "5.1.1+0" [[deps.nghttp2_jll]] deps = ["Artifacts", "Libdl"] uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" +version = "1.47.0+0" [[deps.p7zip_jll]] deps = ["Artifacts", "Libdl"] uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" +version = "17.4.0+0" diff --git a/test/Project.toml b/test/Project.toml index de528f11..6e7c7edc 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -1,5 +1,6 @@ [deps] Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" +Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" ITensorUnicodePlots = "73163f41-4a9e-479f-8353-73bf94dbd758" ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" diff --git a/test/test_abstractgraph.jl b/test/test_abstractgraph.jl new file mode 100644 index 00000000..4d187120 --- /dev/null +++ b/test/test_abstractgraph.jl @@ -0,0 +1,65 @@ +using Test +using Graphs: binary_tree +using ITensorNetworks +using Random + +@testset "Number of neighbors" begin + g1 = comb_tree((3, 2)) + @test num_neighbors(g1, 4) == 1 + @test num_neighbors(g1, 3) == 2 + @test num_neighbors(g1, 2) == 3 + + ng1 = named_comb_tree((3, 2)) + @test num_neighbors(ng1, 1, 2) == 1 + @test num_neighbors(ng1, (3, 1)) == 2 + @test num_neighbors(ng1, (2, 1)) == 3 + + g2 = grid((3, 3)) + @test num_neighbors(g2, 1) == 2 + @test num_neighbors(g2, 2) == 3 + @test num_neighbors(g2, 5) == 4 + + ng2 = named_grid((3, 3)) + @test num_neighbors(ng2, 1, 1) == 2 + @test num_neighbors(ng2, (1, 2)) == 3 + @test num_neighbors(ng2, (2, 2)) == 4 +end + +# TODO: remove once this is merged into NamedGraphs.jl +@testset "Tree graph paths" begin + g1 = comb_tree((3, 2)) + et1 = edgetype(g1) + @test vertex_path(g1, 4, 5) == [4, 1, 2, 5] + @test edge_path(g1, 4, 5) == [et1(4, 1), et1(1, 2), et1(2, 5)] + @test vertex_path(g1, 6, 1) == [6, 3, 2, 1] + @test edge_path(g1, 6, 1) == [et1(6, 3), et1(3, 2), et1(2, 1)] + @test vertex_path(g1, 2, 2) == [2] + @test edge_path(g1, 2, 2) == et1[] + + ng1 = named_comb_tree((3, 2)) + net1 = edgetype(ng1) + @test vertex_path(ng1, (1, 2), (2, 2)) == [(1, 2), (1, 1), (2, 1), (2, 2)] + @test edge_path(ng1, (1, 2), (2, 2)) == + [net1((1, 2), (1, 1)), net1((1, 1), (2, 1)), net1((2, 1), (2, 2))] + @test vertex_path(ng1, (3, 2), (1, 1)) == [(3, 2), (3, 1), (2, 1), (1, 1)] + @test edge_path(ng1, (3, 2), (1, 1)) == + [net1((3, 2), (3, 1)), net1((3, 1), (2, 1)), net1((2, 1), (1, 1))] + @test vertex_path(ng1, (1, 2), (1, 2)) == [(1, 2)] + @test edge_path(ng1, (1, 2), (1, 2)) == net1[] + + g2 = binary_tree(3) + et2 = edgetype(g2) + @test vertex_path(g2, 2, 6) == [2, 1, 3, 6] + @test edge_path(g2, 2, 6) == [et2(2, 1), et2(1, 
3), et2(3, 6)] + @test vertex_path(g2, 5, 4) == [5, 2, 4] + @test edge_path(g2, 5, 4) == [et2(5, 2), et2(2, 4)] + + ng2 = named_binary_tree(3) + net2 = edgetype(ng2) + @test vertex_path(ng2, (1, 1), (1, 2, 1)) == [(1, 1), (1,), (1, 2), (1, 2, 1)] + @test edge_path(ng2, (1, 1), (1, 2, 1)) == + [net2((1, 1), (1,)), net2((1,), (1, 2)), net2((1, 2), (1, 2, 1))] + @test vertex_path(ng2, (1, 1, 2), (1, 1, 1)) == [(1, 1, 2), (1, 1), (1, 1, 1)] + @test edge_path(ng2, (1, 1, 2), (1, 1, 1)) == + [net2((1, 1, 2), (1, 1)), net2((1, 1), (1, 1, 1))] +end diff --git a/test/test_itensornetwork.jl b/test/test_itensornetwork.jl new file mode 100644 index 00000000..bbc92b3c --- /dev/null +++ b/test/test_itensornetwork.jl @@ -0,0 +1,102 @@ +using ITensors +using ITensorNetworks +using Random +using Test + +@testset "ITensorNetwork Basics" begin + Random.seed!(1234) + g = named_grid((4,)) + s = siteinds("S=1/2", g) + + @test s isa IndsNetwork + @test nv(s) == 4 + @test ne(s) == 3 + @test neighbors(s, 2) == [(1,), (3,)] + + tn = ITensorNetwork(s; link_space=2) + + @test nv(tn) == 4 + @test ne(tn) == 3 + @test tn isa ITensorNetwork + @test neighbors(tn, 2) == [(1,), (3,)] + @test tn[1] isa ITensor + @test order(tn[1]) == 2 + @test tn[2] isa ITensor + @test order(tn[2]) == 3 + @test tn[1:2] isa ITensorNetwork + + randn!.(vertex_data(tn)) + tn′ = sim(dag(tn); sites=[]) + + @test tn′ isa ITensorNetwork + inner_tn = tn ⊗ tn′ + @test inner_tn isa ITensorNetwork + sequence = optimal_contraction_sequence(inner_tn) + @test sequence isa Vector + inner_res = contract(inner_tn; sequence)[] + @test inner_res isa Float64 + + @testset "Contract edge (regression test for issue #5)" begin + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + ψ = ITensorNetwork(s, v -> "↑") + tn = inner_network(ψ, sim(dag(ψ); sites=[])) + tn_2 = contract(tn, (2, 1, 2) => (1, 1, 2)) + @test !has_vertex(tn_2, (2, 1, 2)) + @test tn_2[1, 1, 2] ≈ tn[2, 1, 2] * tn[1, 1, 2] + end + + @testset "Remove edge (regression test for issue #5)" begin + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + ψ = ITensorNetwork(s, v -> "↑") + rem_vertex!(ψ, (1, 2)) + tn = inner_network(ψ, sim(dag(ψ); sites=[])) + @test !has_vertex(tn, (1, 1, 2)) + @test !has_vertex(tn, (2, 1, 2)) + @test has_vertex(tn, (1, 1, 1)) + @test has_vertex(tn, (2, 1, 1)) + @test has_vertex(tn, (1, 2, 1)) + @test has_vertex(tn, (2, 2, 1)) + @test has_vertex(tn, (1, 2, 2)) + @test has_vertex(tn, (2, 2, 2)) + end + + @testset "Index access" begin + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + ψ = ITensorNetwork(s; link_space=2) + + nt = ITensorNetworks.neighbor_itensors(ψ, 1, 1) + @test length(nt) == 2 + @test all(map(hascommoninds(ψ[1, 1]), nt)) + + @test all(map(t -> isempty(commoninds(inds(t), uniqueinds(ψ, 1, 1))), nt)) + + e = (1, 1) => (2, 1) + uie = uniqueinds(ψ, e) + @test isempty(commoninds(uie, inds(ψ[2, 1]))) + @test issetequal(uie, union(commoninds(ψ[1, 1], ψ[1, 2]), uniqueinds(ψ, 1, 1))) + + @test siteinds(all, ψ, 1, 1) == s[1, 1] + @test siteinds(only, ψ, 1, 1) == only(s[1, 1]) + + cie = commoninds(ψ, e) + @test hasinds(ψ[1, 1], cie) && hasinds(ψ[2, 1], cie) + @test isempty(commoninds(uie, cie)) + + @test linkinds(all, ψ, e) == commoninds(ψ[1, 1], ψ[2, 1]) + @test linkinds(only, ψ, e) == only(commoninds(ψ[1, 1], ψ[2, 1])) + end + + @testset "ElType in constructors" begin + # TODO + end + + @testset "Construction from state (function)" begin + # TODO + end +end diff --git a/test/test_itensornetwork_basics.jl 
b/test/test_itensornetwork_basics.jl deleted file mode 100644 index 2c982a2c..00000000 --- a/test/test_itensornetwork_basics.jl +++ /dev/null @@ -1,66 +0,0 @@ -using ITensors -using ITensorNetworks -using Random -using Test - -@testset "ITensorNetwork Basics" begin - Random.seed!(1234) - g = named_grid((4,)) - s = siteinds("S=1/2", g) - - @test s isa IndsNetwork - @test nv(s) == 4 - @test ne(s) == 3 - @test neighbors(s, 2) == [(1,), (3,)] - - tn = ITensorNetwork(s; link_space=2) - - @test nv(tn) == 4 - @test ne(tn) == 3 - @test tn isa ITensorNetwork - @test neighbors(tn, 2) == [(1,), (3,)] - @test tn[1] isa ITensor - @test order(tn[1]) == 2 - @test tn[2] isa ITensor - @test order(tn[2]) == 3 - @test tn[1:2] isa ITensorNetwork - - randn!.(vertex_data(tn)) - tn′ = sim(dag(tn); sites=[]) - - @test tn′ isa ITensorNetwork - inner_tn = tn ⊗ tn′ - @test inner_tn isa ITensorNetwork - sequence = optimal_contraction_sequence(inner_tn) - @test sequence isa Vector - inner_res = contract(inner_tn; sequence)[] - @test inner_res isa Float64 -end - -@testset "Contract edge (regression test for issue #5)" begin - dims = (2, 2) - g = named_grid(dims) - s = siteinds("S=1/2", g) - ψ = ITensorNetwork(s, v -> "↑") - tn = inner(ψ, sim(dag(ψ); sites=[])) - tn_2 = contract(tn, (2, 1, 2) => (1, 1, 2)) - @test !has_vertex(tn_2, (2, 1, 2)) - @test tn_2[1, 1, 2] ≈ tn[2, 1, 2] * tn[1, 1, 2] -end - -@testset "Remove edge (regression test for issue #5)" begin - dims = (2, 2) - g = named_grid(dims) - s = siteinds("S=1/2", g) - ψ = ITensorNetwork(s, v -> "↑") - rem_vertex!(ψ, (1, 2)) - tn = inner(ψ, sim(dag(ψ); sites=[])) - @test !has_vertex(tn, (1, 1, 2)) - @test !has_vertex(tn, (2, 1, 2)) - @test has_vertex(tn, (1, 1, 1)) - @test has_vertex(tn, (2, 1, 1)) - @test has_vertex(tn, (1, 2, 1)) - @test has_vertex(tn, (2, 2, 1)) - @test has_vertex(tn, (1, 2, 2)) - @test has_vertex(tn, (2, 2, 2)) -end diff --git a/test/test_namedgraphs.jl b/test/test_namedgraphs.jl new file mode 100644 index 00000000..cbcf6da1 --- /dev/null +++ b/test/test_namedgraphs.jl @@ -0,0 +1,144 @@ +using Test +using ITensorNetworks +using ITensorNetworks: NamedGraph, NamedDimGraph, NamedDimDiGraph, parent_graph +using Random +using Graphs: Graph, DiGraph, grid, add_edge!, rem_edge! 
+ +# TODO: remove once this is merged into NamedGraphs.jl + +@testset "AbstractNamedGraph equality" begin + # NamedGraph + g = grid((2, 2)) + vs = ["A", "B", "C", "D"] + ng1 = NamedGraph(g, vs) + # construct same NamedGraph with different underlying structure + ng2 = NamedGraph(Graph(4), vs[[1, 4, 3, 2]]) + add_edge!(ng2, "A" => "B") + add_edge!(ng2, "A" => "C") + add_edge!(ng2, "B" => "D") + add_edge!(ng2, "C" => "D") + @test parent_graph(ng1) != parent_graph(ng2) + @test ng1 == ng2 + rem_edge!(ng2, "B" => "A") + @test ng1 != ng2 + + # NamedDimGraph + dvs = [("X", 1), ("X", 2), ("Y", 1), ("Y", 2)] + ndg1 = NamedDimGraph(g, dvs) + # construct same NamedDimGraph from different underlying structure + ndg2 = NamedDimGraph(Graph(4), dvs[[1, 4, 3, 2]]) + add_edge!(ndg2, ("X", 1) => ("X", 2)) + add_edge!(ndg2, ("X", 1) => ("Y", 1)) + add_edge!(ndg2, ("X", 2) => ("Y", 2)) + add_edge!(ndg2, ("Y", 1) => ("Y", 2)) + @test parent_graph(ndg1) != parent_graph(ndg2) + @test ndg1 == ndg2 + rem_edge!(ndg2, ("Y", 1) => ("X", 1)) + @test ndg1 != ndg2 + + # NamedDimDiGraph + nddg1 = NamedDimDiGraph(DiGraph(collect(edges(g))), dvs) + # construct same NamedDimDiGraph from different underlying structure + nddg2 = NamedDimDiGraph(DiGraph(4), dvs[[1, 4, 3, 2]]) + add_edge!(nddg2, ("X", 1) => ("X", 2)) + add_edge!(nddg2, ("X", 1) => ("Y", 1)) + add_edge!(nddg2, ("X", 2) => ("Y", 2)) + add_edge!(nddg2, ("Y", 1) => ("Y", 2)) + @test parent_graph(nddg1) != parent_graph(nddg2) + @test nddg1 == nddg2 + rem_edge!(nddg2, ("X", 1) => ("Y", 1)) + add_edge!(nddg2, ("Y", 1) => ("X", 1)) + @test nddg1 != nddg2 +end + +@testset "AbstractNamedGraph vertex renaming" begin + g = grid((2, 2)) + integer_names = collect(1:4) + string_names = ["A", "B", "C", "D"] + tuple_names = [("X", 1), ("X", 2), ("Y", 1), ("Y", 2)] + function_name = x -> reverse(x) + + # NamedGraph + ng = NamedGraph(g, string_names) + # rename to integers + vmap_int = Dictionary(vertices(ng), integer_names) + ng_int = rename_vertices(ng, vmap_int) + @test isa(ng_int, NamedGraph{Int}) + @test has_vertex(ng_int, 3) + @test has_edge(ng_int, 1 => 2) + @test has_edge(ng_int, 2 => 4) + # rename to tuples + vmap_tuple = Dictionary(vertices(ng), tuple_names) + ng_tuple = rename_vertices(ng, vmap_tuple) + @test isa(ng_tuple, NamedGraph{Tuple{String,Int}}) + @test has_vertex(ng_tuple, ("X", 1)) + @test has_edge(ng_tuple, ("X", 1) => ("X", 2)) + @test has_edge(ng_tuple, ("X", 2) => ("Y", 2)) + # rename with name map function + ng_function = rename_vertices(ng_tuple, function_name) + @test isa(ng_function, NamedGraph{Tuple{Int,String}}) + @test has_vertex(ng_function, (1, "X")) + @test has_edge(ng_function, (1, "X") => (2, "X")) + @test has_edge(ng_function, (2, "X") => (2, "Y")) + + # NamedDimGraph + ndg = named_grid((2, 2)) + # rename to integers + vmap_int = Dictionary(vertices(ndg), integer_names) + ndg_int = rename_vertices(ndg, vmap_int) + @test isa(ndg_int, NamedDimGraph{Tuple}) + @test has_vertex(ndg_int, (1,)) + @test has_edge(ndg_int, (1,) => (2,)) + @test has_edge(ndg_int, (2,) => (4,)) + # rename to strings + vmap_string = Dictionary(vertices(ndg), string_names) + ndg_string = rename_vertices(ndg, vmap_string) + @test isa(ndg_string, NamedDimGraph{Tuple}) + @test has_vertex(ndg_string, ("A",)) + @test has_edge(ndg_string, ("A",) => ("B",)) + @test has_edge(ndg_string, ("B",) => ("D",)) + # rename to strings + vmap_tuple = Dictionary(vertices(ndg), tuple_names) + ndg_tuple = rename_vertices(ndg, vmap_tuple) + @test isa(ndg_tuple, NamedDimGraph{Tuple}) + @test 
has_vertex(ndg_tuple, "X", 1) + @test has_edge(ndg_tuple, ("X", 1) => ("X", 2)) + @test has_edge(ndg_tuple, ("X", 2) => ("Y", 2)) + # rename with name map function + ndg_function = rename_vertices(ndg_tuple, function_name) + @test isa(ndg_function, NamedDimGraph{Tuple}) + @test has_vertex(ndg_function, 1, "X") + @test has_edge(ndg_function, (1, "X") => (2, "X")) + @test has_edge(ndg_function, (2, "X") => (2, "Y")) + + # NamedDimDiGraph + nddg = NamedDimDiGraph(DiGraph(collect(edges(g))), vertices(ndg)) + # rename to integers + vmap_int = Dictionary(vertices(nddg), integer_names) + nddg_int = rename_vertices(nddg, vmap_int) + @test isa(nddg_int, NamedDimDiGraph{Tuple}) + @test has_vertex(nddg_int, (1,)) + @test has_edge(nddg_int, (1,) => (2,)) + @test has_edge(nddg_int, (2,) => (4,)) + # rename to strings + vmap_string = Dictionary(vertices(nddg), string_names) + nddg_string = rename_vertices(nddg, vmap_string) + @test isa(nddg_string, NamedDimDiGraph{Tuple}) + @test has_vertex(nddg_string, ("A",)) + @test has_edge(nddg_string, ("A",) => ("B",)) + @test has_edge(nddg_string, ("B",) => ("D",)) + @test !has_edge(nddg_string, ("D",) => ("B",)) + # rename to strings + vmap_tuple = Dictionary(vertices(nddg), tuple_names) + nddg_tuple = rename_vertices(nddg, vmap_tuple) + @test isa(nddg_tuple, NamedDimDiGraph{Tuple}) + @test has_vertex(nddg_tuple, "X", 1) + @test has_edge(nddg_tuple, ("X", 1) => ("X", 2)) + @test !has_edge(nddg_tuple, ("Y", 2) => ("X", 2)) + # rename with name map function + nddg_function = rename_vertices(nddg_tuple, function_name) + @test isa(nddg_function, NamedDimDiGraph{Tuple}) + @test has_vertex(nddg_function, 1, "X") + @test has_edge(nddg_function, (1, "X") => (2, "X")) + @test has_edge(nddg_function, (2, "X") => (2, "Y")) +end diff --git a/test/test_opsum_to_ttno.jl b/test/test_opsum_to_ttno.jl new file mode 100644 index 00000000..ade2ac6a --- /dev/null +++ b/test/test_opsum_to_ttno.jl @@ -0,0 +1,73 @@ +using Test +using ITensorNetworks +using ITensors +using Random +include("utils.jl") + +@testset "OpSum to TTNO" begin + # small comb tree + tooth_lengths = fill(2, 3) + c = named_comb_tree(tooth_lengths) + root_vertex = (3, 2) + is = siteinds("S=1/2", c) + + # linearized version + linear_order = [4, 1, 2, 5, 3, 6] + vmap = Dictionary(vertices(is)[linear_order], 1:length(linear_order)) + sites = only.(collect(vertex_data(is)))[linear_order] + + # test with next-to-nearest-neighbor Ising Hamiltonian + J1 = -1 + J2 = 2 + h = 0.5 + H = ising_graph(c; J1=J1, J2=J2, h=h) + + # add combination of longer range interactions + Hlr = copy(H) + Hlr += 5, "Z", (1, 2), "Z", (2, 2) + Hlr += -4, "Z", (1, 1), "Z", (2, 2) + + @testset "Finite state machine" begin + # get TTNO Hamiltonian directly + Hfsm = TTNO(H, is; root_vertex=root_vertex, method=:fsm, cutoff=1e-10) + # get corresponding MPO Hamiltonian + Hline = MPO(relabel_sites(H, vmap), sites) + # compare resulting dense Hamiltonians + @disable_warn_order begin + Tttno = prod(Hline) + Tmpo = contract(Hfsm) + end + @test Tttno ≈ Tmpo rtol = 1e-6 + + # same thing for longer range interactions + Hfsm_lr = TTNO(Hlr, is; root_vertex=root_vertex, method=:fsm, cutoff=1e-10) + Hline_lr = MPO(relabel_sites(Hlr, vmap), sites) + @disable_warn_order begin + Tttno_lr = prod(Hline_lr) + Tmpo_lr = contract(Hfsm_lr) + end + @test Tttno_lr ≈ Tmpo_lr rtol = 1e-6 + end + + @testset "Svd approach" begin + # get TTNO Hamiltonian directly + Hsvd = TTNO(H, is; root_vertex=root_vertex, method=:svd, cutoff=1e-10) + # get corresponding MPO Hamiltonian + Hline 
= MPO(relabel_sites(H, vmap), sites) + # compare resulting dense Hamiltonians + @disable_warn_order begin + Tttno = prod(Hline) + Tmpo = contract(Hsvd) + end + @test Tttno ≈ Tmpo rtol = 1e-6 + + # this breaks for longer range interactions + Hsvd_lr = TTNO(Hlr, is; root_vertex=root_vertex, method=:svd, cutoff=1e-10) + Hline_lr = MPO(relabel_sites(Hlr, vmap), sites) + @disable_warn_order begin + Tttno_lr = prod(Hline_lr) + Tmpo_lr = contract(Hsvd_lr) + end + @test_broken Tttno_lr ≈ Tmpo_lr rtol_lr = 1e-6 + end +end diff --git a/test/test_ttno.jl b/test/test_ttno.jl new file mode 100644 index 00000000..80cf848f --- /dev/null +++ b/test/test_ttno.jl @@ -0,0 +1,51 @@ +using Test +using ITensorNetworks +using ITensors +using Random + +@testset "TTNO Basics" begin + + # random comb tree + tooth_lengths = rand(1:3, rand(2:4)) + c = named_comb_tree(tooth_lengths) + # specify random site dimension on every site + dmap = v -> rand(1:3) + is = siteinds(dmap, c) + # operator site inds + is_isp = merge(is, prime(is; links=[])) + # specify random linear vertex ordering of graph vertices + vertex_order = shuffle(vertices(c)) + + @testset "Construct TTNO from ITensor or Array" begin + cutoff = 1e-10 + sites_o = [is_isp[v] for v in vertex_order] + # create random ITensor with these indices + O = randomITensor(sites_o...) + # dense TTNS constructor from IndsNetwork + @disable_warn_order o1 = TTNO(O, is_isp; cutoff) + # dense TTNS constructor from Vector{Vector{Index}} and NamedDimGraph + @disable_warn_order o2 = TTNO(O, sites_o, c; vertex_order, cutoff) + # convert to array with proper index order + AO = Array(O, sites_o...) + # dense array constructor from IndsNetwork + @disable_warn_order o3 = TTNO(AO, is_isp; vertex_order, cutoff) + # dense array constructor from Vector{Vector{Index}} and NamedDimGraph + @disable_warn_order o4 = TTNO(AO, sites_o, c; vertex_order, cutoff) + # see if this actually worked + root_vertex = only(ortho_center(o1)) + @disable_warn_order begin + O1 = contract(o1, root_vertex) + O2 = contract(o2, root_vertex) + O3 = contract(o3, root_vertex) + O4 = contract(o4, root_vertex) + end + @test norm(O - O1) < 1e2 * cutoff + @test norm(O - O2) < 1e2 * cutoff + @test norm(O - O3) < 1e2 * cutoff + @test norm(O - O4) < 1e2 * cutoff + end + + @testset "Ortho" begin + # TODO + end +end diff --git a/test/test_ttns.jl b/test/test_ttns.jl new file mode 100644 index 00000000..25fe8b38 --- /dev/null +++ b/test/test_ttns.jl @@ -0,0 +1,49 @@ +using Test +using ITensorNetworks +using ITensors +using Random + +@testset "TTNS Basics" begin + + # random comb tree + tooth_lengths = rand(1:3, rand(2:4)) + c = named_comb_tree(tooth_lengths) + # specify random site dimension on every site + dmap = v -> rand(1:3) + is = siteinds(dmap, c) + # specify random linear vertex ordering of graph vertices + vertex_order = shuffle(vertices(c)) + + @testset "Construct TTNS from ITensor or Array" begin + cutoff = 1e-10 + sites_s = [only(is[v]) for v in vertex_order] + # create random ITensor with these indices + S = randomITensor(vertex_data(is)...) + # dense TTNS constructor from IndsNetwork + @disable_warn_order s1 = TTNS(S, is; cutoff) + # dense TTNS constructor from Vector{Index} and NamedDimGraph + @disable_warn_order s2 = TTNS(S, sites_s, c; vertex_order, cutoff) + # convert to array with proper index order + @disable_warn_order AS = Array(S, sites_s...) 
+ # dense array constructor from IndsNetwork + @disable_warn_order s3 = TTNS(AS, is; vertex_order, cutoff) + # dense array constructor from Vector{Index} and NamedDimGraph + @disable_warn_order s4 = TTNS(AS, sites_s, c; vertex_order, cutoff) + # see if this actually worked + root_vertex = only(ortho_center(s1)) + @disable_warn_order begin + S1 = contract(s1, root_vertex) + S2 = contract(s2, root_vertex) + S3 = contract(s3, root_vertex) + S4 = contract(s4, root_vertex) + end + @test norm(S - S1) < 1e2 * cutoff + @test norm(S - S2) < 1e2 * cutoff + @test norm(S - S3) < 1e2 * cutoff + @test norm(S - S4) < 1e2 * cutoff + end + + @testset "Ortho" begin + # TODO + end +end diff --git a/test/utils.jl b/test/utils.jl new file mode 100644 index 00000000..1709f3a5 --- /dev/null +++ b/test/utils.jl @@ -0,0 +1,81 @@ +using Graphs: AbstractGraph, grid +using Dictionaries: AbstractDictionary + +_maybe_fill(x, n) = x +_maybe_fill(x::Number, n) = fill(x, n) + +""" +Random field J1-J2 Heisenberg model on a general graph +""" +function heisenberg_graph( + g::AbstractGraph; J1=1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0 +) + h = _maybe_fill(h, nv(g)) + H = OpSum() + if !iszero(J1) + for e in edges(g) + H += J1 / 2, "S+", src(e), "S-", dst(e) + H += J1 / 2, "S-", src(e), "S+", dst(e) + H += J1, "Sz", src(e), "Sz", dst(e) + end + end + if !iszero(J2) + # TODO, more clever way of looping over next to nearest neighbors? + for (i, v) in enumerate(vertices(g)) + nnn = [neighbors(g, n) for n in neighbors(g, v)] + nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) + nnn = setdiff(nnn, vertices(g)[1:i]) + for nn in nnn + H += J2 / 2, "S+", v, "S-", nn + H += J2 / 2, "S-", v, "S+", nn + H += J2, "Sz", v, "Sz", nn + end + end + end + for (i, v) in enumerate(vertices(g)) + if !iszero(h[i]) + H -= h[i], "Sz", v + end + end + return H +end + +""" +Next-to-nearest-neighbor Ising model (ZZX) on a general graph +""" +function ising_graph(g::AbstractGraph; J1=-1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0) + h = _maybe_fill(h, nv(g)) + H = OpSum() + if !iszero(J1) + for e in edges(g) + H += J1, "Z", src(e), "Z", dst(e) + end + end + if !iszero(J2) + # TODO, more clever way of looping over next to nearest neighbors? + for (i, v) in enumerate(vertices(g)) + nnn = [neighbors(g, n) for n in neighbors(g, v)] + nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) + nnn = setdiff(nnn, vertices(g)[1:i]) + for nn in nnn + H += J2, "Z", v, "Z", nn + end + end + end + for (i, v) in enumerate(vertices(g)) + if !iszero(h[i]) + H += h[i], "X", v + end + end + return H +end + +""" +Random field J1-J2 Heisenberg model on a chain of length N +""" +heisenberg(N; kwargs...) = heisenberg_graph(grid((N,)); kwargs...) + +""" +Next-to-nearest-neighbor Ising model (ZZX) on a chain of length N +""" +ising(N; kwargs...) = ising(grid((N,)); kwargs...) 
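A minimal sketch of how the pieces added in this patch fit together, for orientation: the Hamiltonian helpers from test/utils.jl feed an OpSum into the new TTNO constructor from src/treetensornetwork/opsum_to_ttno.jl, as exercised in test/test_opsum_to_ttno.jl. Every name and keyword below (named_comb_tree, siteinds, ising_graph, TTNO with root_vertex, method and cutoff) is taken from the diffs above; the parameter values are arbitrary and the snippet assumes it is run from the test/ directory.

    using ITensors, ITensorNetworks
    include("utils.jl")                      # Hamiltonian helpers added in this patch

    tooth_lengths = fill(2, 3)               # three comb teeth of length two
    c = named_comb_tree(tooth_lengths)
    is = siteinds("S=1/2", c)                # spin-1/2 site index on every vertex
    H = ising_graph(c; J1=-1, J2=2, h=0.5)   # next-to-nearest-neighbor Ising OpSum
    # convert the OpSum into a tree tensor network operator, rooted at a leaf vertex,
    # using the finite-state-machine backend
    Hfsm = TTNO(H, is; root_vertex=(3, 2), method=:fsm, cutoff=1e-10)
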
From b3ca106038cd67ea48fd89896dd0e4ba2285f0cb Mon Sep 17 00:00:00 2001 From: leburgel Date: Tue, 1 Nov 2022 10:25:48 -0700 Subject: [PATCH 02/13] Switch off numerical truncation by default in OpSum->TTNO converter Remove tree graph paths (merged into NamedGraphs.jl) --- src/ITensorNetworks.jl | 1 - src/graphs/abstractgraph.jl | 30 ------------------- src/treetensornetwork/opsum_to_ttno.jl | 7 ++++- test/test_abstractgraph.jl | 40 -------------------------- 4 files changed, 6 insertions(+), 72 deletions(-) delete mode 100644 src/graphs/abstractgraph.jl diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 94ad8abf..4411f91a 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -122,7 +122,6 @@ include(joinpath("treetensornetwork", "opsum_to_ttno.jl")) include(joinpath("treetensornetwork", "abstractprojttno.jl")) include(joinpath("treetensornetwork", "projttno.jl")) include(joinpath("treetensornetwork", "projttnosum.jl")) -include(joinpath("graphs", "abstractgraph.jl")) include(joinpath("graphs", "namedgraphs.jl")) include("utility.jl") diff --git a/src/graphs/abstractgraph.jl b/src/graphs/abstractgraph.jl deleted file mode 100644 index 37257398..00000000 --- a/src/graphs/abstractgraph.jl +++ /dev/null @@ -1,30 +0,0 @@ -using Graphs.SimpleTraits - -# TODO: remove once this is merged into NamedGraphs.jl - -# paths for tree graphs - -@traitfn function vertex_path(graph::::(!IsDirected), s, t) - dfs_tree_graph = dfs_tree(graph, t...) - return vertex_path(dfs_tree_graph, s, t) -end - -@traitfn function edge_path(graph::::(!IsDirected), s, t) - dfs_tree_graph = dfs_tree(graph, t...) - return edge_path(dfs_tree_graph, s, t) -end - -# assumes the graph is a rooted directed tree with root d -@traitfn function vertex_path(graph::::IsDirected, s, t) - vertices = eltype(graph)[s] - while vertices[end] != t - push!(vertices, parent_vertex(graph, vertices[end]...)) - end - return vertices -end - -@traitfn function edge_path(graph::::IsDirected, s, t) - vertices = vertex_path(graph, s, t) - pop!(vertices) - return [edgetype(graph)(vertex, parent_vertex(graph, vertex...)) for vertex in vertices] -end diff --git a/src/treetensornetwork/opsum_to_ttno.jl b/src/treetensornetwork/opsum_to_ttno.jl index 1d7de470..ef06ed0f 100644 --- a/src/treetensornetwork/opsum_to_ttno.jl +++ b/src/treetensornetwork/opsum_to_ttno.jl @@ -526,7 +526,8 @@ function TTNO( sites::IndsNetwork{<:Index}; root_vertex::Tuple=default_root_vertex(sites), splitblocks=false, - method::Symbol=:fsm, # default to construction from finite state machine with manual truncation until svdTTNO is fixed + method::Symbol=:fsm, # default to construction from finite state machine until svdTTNO is fixed + trunc=false, kwargs..., )::TTNO length(ITensors.terms(os)) == 0 && error("OpSum has no terms") @@ -545,6 +546,10 @@ function TTNO( T = svdTTNO(os, sites, root_vertex; kwargs...) elseif method == :fsm T = fsmTTNO(os, sites, root_vertex) + end + # add option for numerical truncation, but throw warning as this can fail sometimes + if trunc + @warn "Naive numerical truncation of TTNO Hamiltonian may fail for larger systems." 
# see https://github.com/ITensor/ITensors.jl/issues/526 lognormT = lognorm(T) T /= exp(lognormT / nv(T)) # TODO: fix broadcasting for in-place assignment diff --git a/test/test_abstractgraph.jl b/test/test_abstractgraph.jl index 4d187120..cd3f1249 100644 --- a/test/test_abstractgraph.jl +++ b/test/test_abstractgraph.jl @@ -1,5 +1,4 @@ using Test -using Graphs: binary_tree using ITensorNetworks using Random @@ -24,42 +23,3 @@ using Random @test num_neighbors(ng2, (1, 2)) == 3 @test num_neighbors(ng2, (2, 2)) == 4 end - -# TODO: remove once this is merged into NamedGraphs.jl -@testset "Tree graph paths" begin - g1 = comb_tree((3, 2)) - et1 = edgetype(g1) - @test vertex_path(g1, 4, 5) == [4, 1, 2, 5] - @test edge_path(g1, 4, 5) == [et1(4, 1), et1(1, 2), et1(2, 5)] - @test vertex_path(g1, 6, 1) == [6, 3, 2, 1] - @test edge_path(g1, 6, 1) == [et1(6, 3), et1(3, 2), et1(2, 1)] - @test vertex_path(g1, 2, 2) == [2] - @test edge_path(g1, 2, 2) == et1[] - - ng1 = named_comb_tree((3, 2)) - net1 = edgetype(ng1) - @test vertex_path(ng1, (1, 2), (2, 2)) == [(1, 2), (1, 1), (2, 1), (2, 2)] - @test edge_path(ng1, (1, 2), (2, 2)) == - [net1((1, 2), (1, 1)), net1((1, 1), (2, 1)), net1((2, 1), (2, 2))] - @test vertex_path(ng1, (3, 2), (1, 1)) == [(3, 2), (3, 1), (2, 1), (1, 1)] - @test edge_path(ng1, (3, 2), (1, 1)) == - [net1((3, 2), (3, 1)), net1((3, 1), (2, 1)), net1((2, 1), (1, 1))] - @test vertex_path(ng1, (1, 2), (1, 2)) == [(1, 2)] - @test edge_path(ng1, (1, 2), (1, 2)) == net1[] - - g2 = binary_tree(3) - et2 = edgetype(g2) - @test vertex_path(g2, 2, 6) == [2, 1, 3, 6] - @test edge_path(g2, 2, 6) == [et2(2, 1), et2(1, 3), et2(3, 6)] - @test vertex_path(g2, 5, 4) == [5, 2, 4] - @test edge_path(g2, 5, 4) == [et2(5, 2), et2(2, 4)] - - ng2 = named_binary_tree(3) - net2 = edgetype(ng2) - @test vertex_path(ng2, (1, 1), (1, 2, 1)) == [(1, 1), (1,), (1, 2), (1, 2, 1)] - @test edge_path(ng2, (1, 1), (1, 2, 1)) == - [net2((1, 1), (1,)), net2((1,), (1, 2)), net2((1, 2), (1, 2, 1))] - @test vertex_path(ng2, (1, 1, 2), (1, 1, 1)) == [(1, 1, 2), (1, 1), (1, 1, 1)] - @test edge_path(ng2, (1, 1, 2), (1, 1, 1)) == - [net2((1, 1, 2), (1, 1)), net2((1, 1), (1, 1, 1))] -end From e00c345b95ffbcbf7d579ce710f41ce831a57a2b Mon Sep 17 00:00:00 2001 From: leburgel Date: Thu, 3 Nov 2022 16:39:26 -0700 Subject: [PATCH 03/13] Remove functionalities that were merged into NamedGraphs.jl Extend `models.jl` and remove `test/utils.jl` Change lazy `inner` to explicit `inner_network` --- src/ITensorNetworks.jl | 2 - src/abstractitensornetwork.jl | 4 +- src/exports.jl | 1 + src/graphs/namedgraphs.jl | 49 ------------ src/models.jl | 78 ++++++++++++++++-- src/namedgraphs.jl | 23 ------ test/Manifest.toml | 92 ++++++++++++++++------ test/Project.toml | 2 + test/test_namedgraphs.jl | 144 ---------------------------------- test/test_opsum_to_ttno.jl | 3 +- test/test_ttno.jl | 2 +- test/utils.jl | 81 ------------------- 12 files changed, 149 insertions(+), 332 deletions(-) delete mode 100644 src/graphs/namedgraphs.jl delete mode 100644 src/namedgraphs.jl delete mode 100644 test/test_namedgraphs.jl delete mode 100644 test/utils.jl diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 8583f604..2659ada8 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -113,7 +113,6 @@ const UniformDataGraph{D} = NamedDimDataGraph{ } include("utils.jl") -include("namedgraphs.jl") include("itensors.jl") include("partition.jl") include("lattices.jl") @@ -135,7 +134,6 @@ include(joinpath("treetensornetwork", "opsum_to_ttno.jl")) 
include(joinpath("treetensornetwork", "abstractprojttno.jl")) include(joinpath("treetensornetwork", "projttno.jl")) include(joinpath("treetensornetwork", "projttnosum.jl")) -include(joinpath("graphs", "namedgraphs.jl")) include("utility.jl") include("exports.jl") diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 9e71cd7e..1ada1d25 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -546,7 +546,7 @@ function norm_network(tn::AbstractITensorNetwork; kwargs...) end function flattened_inner_network(ϕ::AbstractITensorNetwork, ψ::AbstractITensorNetwork) - tn = inner(prime(ϕ; sites=[]), ψ) + tn = inner_network(prime(ϕ; sites=[]), ψ) for v in vertices(ψ) tn = contract(tn, (2, v...) => (1, v...)) end @@ -559,7 +559,7 @@ function contract_inner( sequence=nothing, contraction_sequence_kwargs=(;), ) - tn = inner(prime(ϕ; sites=[]), ψ) + tn = inner_network(prime(ϕ; sites=[]), ψ) # TODO: convert to an IndsNetwork and compute the contraction sequence for v in vertices(ψ) tn = contract(tn, (2, v...) => (1, v...)) diff --git a/src/exports.jl b/src/exports.jl index 238c581c..992a30e0 100644 --- a/src/exports.jl +++ b/src/exports.jl @@ -88,6 +88,7 @@ export AbstractITensorNetwork, # namedgraphs.jl rename_vertices, # models.jl + heisenberg, ising, # opsum.jl group_terms, diff --git a/src/graphs/namedgraphs.jl b/src/graphs/namedgraphs.jl deleted file mode 100644 index b765c165..00000000 --- a/src/graphs/namedgraphs.jl +++ /dev/null @@ -1,49 +0,0 @@ -using NamedGraphs: AbstractNamedEdge - -# TODO: remove once this is merged into NamedGraphs.jl - -function Base.:(==)(g1::GT, g2::GT) where {GT<:AbstractNamedGraph} - issetequal(vertices(g1), vertices(g2)) || return false - for v in vertices(g1) - issetequal(inneighbors(g1, v), inneighbors(g2, v)) || return false - issetequal(outneighbors(g1, v), outneighbors(g2, v)) || return false - end - return true -end - -# renaming routines for general named graphs - -function rename_vertices(e::ET, name_map::Dictionary) where {ET<:AbstractNamedEdge} - # strip type parameter to allow renaming to change the vertex type - base_edge_type = Base.typename(ET).wrapper - return base_edge_type(name_map[src(e)], name_map[dst(e)]) -end - -function rename_vertices(g::GT, name_map::Dictionary) where {GT<:AbstractNamedGraph} - original_vertices = vertices(g) - new_vertices = getindices(name_map, original_vertices) - # strip type parameter to allow renaming to change the vertex type - base_graph_type = Base.typename(GT).wrapper - new_g = base_graph_type(new_vertices) - for e in edges(g) - add_edge!(new_g, rename_vertices(e, name_map)) - end - return new_g -end - -function rename_vertices(g::AbstractNamedGraph, name_map::Function) - original_vertices = vertices(g) - return rename_vertices(g, Dictionary(original_vertices, name_map.(original_vertices))) -end - -function NamedGraphs.NamedGraph(vertices::Vector) - return NamedGraph(Graph(length(vertices)), vertices) -end - -function NamedGraphs.NamedDimGraph(vertices::Array) - return NamedDimGraph(Graph(length(vertices)); vertices) -end - -function NamedGraphs.NamedDimDiGraph(vertices::Array) - return NamedDimDiGraph(DiGraph(length(vertices)); vertices) -end diff --git a/src/models.jl b/src/models.jl index 88380832..ed342487 100644 --- a/src/models.jl +++ b/src/models.jl @@ -1,10 +1,78 @@ -function ising(g::AbstractGraph; h) +_maybe_fill(x, n) = x +_maybe_fill(x::Number, n) = fill(x, n) + +""" +Random field J1-J2 Heisenberg model on a general graph +""" +function heisenberg( + 
g::AbstractGraph; J1=1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0 +) + h = _maybe_fill(h, nv(g)) ℋ = OpSum() - for e in edges(g) - ℋ -= "Z", maybe_only(src(e)), "Z", maybe_only(dst(e)) + if !iszero(J1) + for e in edges(g) + ℋ += J1 / 2, "S+", maybe_only(src(e)), "S-", maybe_only(dst(e)) + ℋ += J1 / 2, "S-", maybe_only(src(e)), "S+", maybe_only(dst(e)) + ℋ += J1, "Sz", maybe_only(src(e)), "Sz", maybe_only(dst(e)) + end end - for v in vertices(g) - ℋ += h, "X", maybe_only(v) + if !iszero(J2) + # TODO, more clever way of looping over next to nearest neighbors? + for (i, v) in enumerate(vertices(g)) + nnn = [neighbors(g, n) for n in neighbors(g, v)] + nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) + nnn = setdiff(nnn, vertices(g)[1:i]) + for nn in nnn + ℋ += J2 / 2, "S+", maybe_only(v), "S-", maybe_only(nn) + ℋ += J2 / 2, "S-", maybe_only(v), "S+", maybe_only(nn) + ℋ += J2, "Sz", maybe_only(v), "Sz", maybe_only(nn) + end + end + end + for (i, v) in enumerate(vertices(g)) + if !iszero(h[i]) + ℋ -= h[i], "Sz", maybe_only(v) + end + end + return ℋ +end + +""" +Next-to-nearest-neighbor Ising model (ZZX) on a general graph +""" +function ising(g::AbstractGraph; J1=-1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0) + h = _maybe_fill(h, nv(g)) + ℋ = OpSum() + if !iszero(J1) + for e in edges(g) + ℋ += J1, "Z", maybe_only(src(e)), "Z", maybe_only(dst(e)) + end + end + if !iszero(J2) + # TODO, more clever way of looping over next to nearest neighbors? + for (i, v) in enumerate(vertices(g)) + nnn = [neighbors(g, n) for n in neighbors(g, v)] + nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) + nnn = setdiff(nnn, vertices(g)[1:i]) + for nn in nnn + ℋ += J2, "Z", maybe_only(v), "Z", maybe_only(nn) + end + end + end + for (i, v) in enumerate(vertices(g)) + if !iszero(h[i]) + ℋ += h[i], "X", maybe_only(v) + end end return ℋ end + +""" +Random field J1-J2 Heisenberg model on a chain of length N +""" +heisenberg(N::Integer; kwargs...) = heisenberg(grid((N,)); kwargs...) + +""" +Next-to-nearest-neighbor Ising model (ZZX) on a chain of length N +""" +ising(N::Integer; kwargs...) = ising(grid((N,)); kwargs...) 
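A brief usage sketch for the extended model builders above (the graph constructor and keyword names are taken from this diff and the existing named graph utilities; the parameter values are arbitrary):

    using ITensorNetworks

    g = named_grid((3, 3))                          # 3x3 square lattice
    # J1-J2 Heisenberg model with a uniform field of strength h on every vertex;
    # a vector of per-vertex field strengths is also accepted
    H_heis = heisenberg(g; J1=1.0, J2=0.5, h=0.2)
    # next-to-nearest-neighbor Ising (ZZX) model; an integer argument builds a chain
    H_ising = ising(8; J1=-1.0, J2=0.0, h=0.5)
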
diff --git a/src/namedgraphs.jl b/src/namedgraphs.jl deleted file mode 100644 index 29a601ff..00000000 --- a/src/namedgraphs.jl +++ /dev/null @@ -1,23 +0,0 @@ -NamedGraphs.NamedDimGraph(vertices::Vector) = NamedDimGraph(tuple.(vertices)) -function NamedGraphs.NamedDimGraph(vertices::Vector{<:Tuple}) - return NamedDimGraph(Graph(length(vertices)); vertices) -end - -function rename_vertices(e::AbstractEdge, name_map::Dictionary) - return typeof(e)(name_map[src(e)], name_map[dst(e)]) -end - -function rename_vertices(g::NamedDimGraph, name_map::Dictionary) - original_vertices = vertices(g) - new_vertices = getindices(name_map, original_vertices) - new_g = NamedDimGraph(new_vertices) - for e in edges(g) - add_edge!(new_g, rename_vertices(e, name_map)) - end - return new_g -end - -function rename_vertices(g::NamedDimGraph, name_map::Function) - original_vertices = vertices(g) - return rename_vertices(g, Dictionary(original_vertices, name_map.(original_vertices))) -end diff --git a/test/Manifest.toml b/test/Manifest.toml index ef24cbb4..ab3e5c22 100644 --- a/test/Manifest.toml +++ b/test/Manifest.toml @@ -2,7 +2,7 @@ julia_version = "1.8.0-rc3" manifest_format = "2.0" -project_hash = "e7d2e1ca582bfd915f1ed57071b99f80ba7b64a0" +project_hash = "b89016743fe10b38a47b969990cbb0eb56582979" [[deps.AbstractTrees]] git-tree-sha1 = "03e0550477d86222521d254b741d470ba17ea0b5" @@ -31,6 +31,11 @@ uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" [[deps.Base64]] uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +[[deps.BetterExp]] +git-tree-sha1 = "dd3448f3d5b2664db7eceeec5f744535ce6e759b" +uuid = "7cffe744-45fd-4178-b173-cf893948b8b7" +version = "0.1.0" + [[deps.BitIntegers]] deps = ["Random"] git-tree-sha1 = "5a814467bda636f3dde5c4ef83c30dd0a19928e0" @@ -132,9 +137,9 @@ uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" [[deps.Dictionaries]] deps = ["Indexing", "Random", "Serialization"] -git-tree-sha1 = "96dc5c5c8994be519ee3420953c931c55657a3f2" +git-tree-sha1 = "e82c3c97b5b4ec111f3c1b55228cebc7510525a2" uuid = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" -version = "0.3.24" +version = "0.3.25" [[deps.Distributed]] deps = ["Random", "Serialization", "Sockets"] @@ -142,9 +147,9 @@ uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[deps.DocStringExtensions]] deps = ["LibGit2"] -git-tree-sha1 = "5158c2b41018c5f7eb1470d558127ac274eca0c9" +git-tree-sha1 = "c36550cb29cbe373e95b3f40486b9a4148f89ffd" uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.9.1" +version = "0.9.2" [[deps.Downloads]] deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] @@ -169,9 +174,9 @@ version = "0.1.1" [[deps.FileIO]] deps = ["Pkg", "Requires", "UUIDs"] -git-tree-sha1 = "94f5101b96d2d968ace56f7f2db19d0a5f592e28" +git-tree-sha1 = "7be5f99f7d15578798f338f5433b6c432ea8037b" uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549" -version = "1.15.0" +version = "1.16.0" [[deps.FileWatching]] uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" @@ -226,9 +231,9 @@ version = "1.7.4" [[deps.HDF5]] deps = ["Compat", "HDF5_jll", "Libdl", "Mmap", "Random", "Requires"] -git-tree-sha1 = "899f041bf330ebeead3637073b2ca7477760edde" +git-tree-sha1 = "19effd6b5af759c8aaeb9c77f89422d3f975ab65" uuid = "f67ccb44-e63f-5c2f-98bd-6dc0ccc4ba2f" -version = "0.16.11" +version = "0.16.12" [[deps.HDF5_jll]] deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenSSL_jll", "Pkg", "Zlib_jll"] @@ -297,9 +302,9 @@ version = "1.0.0" [[deps.JLD2]] deps = ["FileIO", "MacroTools", "Mmap", "OrderedCollections", "Pkg", "Printf", "Reexport", "TranscodingStreams", "UUIDs"] 
-git-tree-sha1 = "0d0ad913e827d13c5e88a73f9333d7e33c424576" +git-tree-sha1 = "1c3ff7416cb727ebf4bab0491a56a296d7b8cf1d" uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819" -version = "0.4.24" +version = "0.4.25" [[deps.JLLWrappers]] deps = ["Preferences"] @@ -307,6 +312,24 @@ git-tree-sha1 = "abc9885a7ca2052a736a600f7fa66209f96506e1" uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" version = "1.4.1" +[[deps.JSON]] +deps = ["Dates", "Mmap", "Parsers", "Unicode"] +git-tree-sha1 = "3c837543ddb02250ef42f4738347454f95079d4e" +uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" +version = "0.21.3" + +[[deps.KaHyPar]] +deps = ["KaHyPar_jll", "Libdl", "LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "01107e1518357a447aaf6170158179e03e8e29da" +uuid = "2a6221f6-aa48-11e9-3542-2d9e0ef01880" +version = "0.3.0" + +[[deps.KaHyPar_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "boost_jll"] +git-tree-sha1 = "9f47089457d5d8e1e2a901465d5cb7411991ac05" +uuid = "87a0c12d-51e1-52a8-b1ed-2b00825fe6a4" +version = "1.3.0+0" + [[deps.KrylovKit]] deps = ["LinearAlgebra", "Printf"] git-tree-sha1 = "49b0c1dd5c292870577b8f58c51072bd558febb9" @@ -365,9 +388,9 @@ uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" [[deps.MacroTools]] deps = ["Markdown", "Random"] -git-tree-sha1 = "3d3e902b31198a27340d0bf00d6ac452866021cf" +git-tree-sha1 = "42324d08725e200c23d4dfb549e0d5d89dede2d2" uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.9" +version = "0.5.10" [[deps.MarchingCubes]] deps = ["SnoopPrecompile", "StaticArrays"] @@ -425,6 +448,12 @@ version = "0.4.4" uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" version = "1.2.0" +[[deps.OMEinsumContractionOrders]] +deps = ["AbstractTrees", "BetterExp", "JSON", "Requires", "SparseArrays", "Suppressor"] +git-tree-sha1 = "0d4fbd4f2d368bf104671187dcd716a2fac533e0" +uuid = "6f22d1fd-8eed-4bb7-9776-e7d684900715" +version = "0.8.1" + [[deps.OpenBLAS_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" @@ -437,9 +466,9 @@ version = "0.8.1+0" [[deps.OpenSSL_jll]] deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "e60321e3f2616584ff98f0a4f18d98ae6f89bbb3" +git-tree-sha1 = "f6e9dba33f9f2c44e08a020b0caf6903be540004" uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95" -version = "1.1.17+0" +version = "1.1.19+0" [[deps.OpenSpecFun_jll]] deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] @@ -454,9 +483,15 @@ version = "1.4.1" [[deps.PackageCompiler]] deps = ["Artifacts", "LazyArtifacts", "Libdl", "Pkg", "Printf", "RelocatableFolders", "TOML", "UUIDs"] -git-tree-sha1 = "c497e2bb9c2127a411b74dbff56b11f258d67d12" +git-tree-sha1 = "f31ea705915b4e16e8104727ebd99e0c3993478d" uuid = "9b87118b-4619-50d2-8e1e-99f35a4d4d9d" -version = "2.0.9" +version = "2.1.0" + +[[deps.Parsers]] +deps = ["Dates"] +git-tree-sha1 = "6c01a9b494f6d2a9fc180a08b182fcb06f0958a0" +uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" +version = "2.4.2" [[deps.Pkg]] deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] @@ -488,9 +523,9 @@ version = "1.2.2" [[deps.RelocatableFolders]] deps = ["SHA", "Scratch"] -git-tree-sha1 = "cdbd3b1338c72ce29d9584fdbe9e9b70eeb5adca" +git-tree-sha1 = "90bc7a7c96410424509e4263e277e43250c05691" uuid = "05181044-ff0b-4ac5-8273-598c1e38db00" -version = "0.1.3" +version = "1.0.0" [[deps.Requires]] deps = ["UUIDs"] @@ -586,9 +621,14 @@ version = "1.2.3" [[deps.StructArrays]] deps 
= ["Adapt", "DataAPI", "StaticArraysCore", "Tables"] -git-tree-sha1 = "8c6ac65ec9ab781af05b08ff305ddc727c25f680" +git-tree-sha1 = "13237798b407150a6d2e2bce5d793d7d9576e99e" uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a" -version = "0.6.12" +version = "0.6.13" + +[[deps.Suppressor]] +git-tree-sha1 = "c6ed566db2fe3931292865b966d6d140b7ef32a9" +uuid = "fd094767-a336-5f1f-9728-57cf17d0bbfb" +version = "0.2.1" [[deps.TOML]] deps = ["Dates"] @@ -603,9 +643,9 @@ version = "1.0.1" [[deps.Tables]] deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"] -git-tree-sha1 = "2d7164f7b8a066bcfa6224e67736ce0eb54aef5b" +git-tree-sha1 = "c79322d36826aa2f4fd8ecfa96ddb47b174ac78d" uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" -version = "1.9.0" +version = "1.10.0" [[deps.Tar]] deps = ["ArgTools", "SHA"] @@ -675,6 +715,12 @@ git-tree-sha1 = "8c1a8e4dfacb1fd631745552c8db35d0deb09ea0" uuid = "700de1a5-db45-46bc-99cf-38207098b444" version = "0.2.2" +[[deps.boost_jll]] +deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"] +git-tree-sha1 = "7a89efe0137720ca82f99e8daa526d23120d0d37" +uuid = "28df3c45-c428-5900-9ff8-a3135698ca75" +version = "1.76.0+1" + [[deps.libblastrampoline_jll]] deps = ["Artifacts", "Libdl", "OpenBLAS_jll"] uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" diff --git a/test/Project.toml b/test/Project.toml index 6e7c7edc..1764b826 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -3,5 +3,7 @@ Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4" Graphs = "86223c79-3864-5bf0-83f7-82e725a168b6" ITensorUnicodePlots = "73163f41-4a9e-479f-8353-73bf94dbd758" ITensors = "9136182c-28ba-11e9-034c-db9fb085ebd5" +KaHyPar = "2a6221f6-aa48-11e9-3542-2d9e0ef01880" +OMEinsumContractionOrders = "6f22d1fd-8eed-4bb7-9776-e7d684900715" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/test/test_namedgraphs.jl b/test/test_namedgraphs.jl deleted file mode 100644 index cbcf6da1..00000000 --- a/test/test_namedgraphs.jl +++ /dev/null @@ -1,144 +0,0 @@ -using Test -using ITensorNetworks -using ITensorNetworks: NamedGraph, NamedDimGraph, NamedDimDiGraph, parent_graph -using Random -using Graphs: Graph, DiGraph, grid, add_edge!, rem_edge! 
- -# TODO: remove once this is merged into NamedGraphs.jl - -@testset "AbstractNamedGraph equality" begin - # NamedGraph - g = grid((2, 2)) - vs = ["A", "B", "C", "D"] - ng1 = NamedGraph(g, vs) - # construct same NamedGraph with different underlying structure - ng2 = NamedGraph(Graph(4), vs[[1, 4, 3, 2]]) - add_edge!(ng2, "A" => "B") - add_edge!(ng2, "A" => "C") - add_edge!(ng2, "B" => "D") - add_edge!(ng2, "C" => "D") - @test parent_graph(ng1) != parent_graph(ng2) - @test ng1 == ng2 - rem_edge!(ng2, "B" => "A") - @test ng1 != ng2 - - # NamedDimGraph - dvs = [("X", 1), ("X", 2), ("Y", 1), ("Y", 2)] - ndg1 = NamedDimGraph(g, dvs) - # construct same NamedDimGraph from different underlying structure - ndg2 = NamedDimGraph(Graph(4), dvs[[1, 4, 3, 2]]) - add_edge!(ndg2, ("X", 1) => ("X", 2)) - add_edge!(ndg2, ("X", 1) => ("Y", 1)) - add_edge!(ndg2, ("X", 2) => ("Y", 2)) - add_edge!(ndg2, ("Y", 1) => ("Y", 2)) - @test parent_graph(ndg1) != parent_graph(ndg2) - @test ndg1 == ndg2 - rem_edge!(ndg2, ("Y", 1) => ("X", 1)) - @test ndg1 != ndg2 - - # NamedDimDiGraph - nddg1 = NamedDimDiGraph(DiGraph(collect(edges(g))), dvs) - # construct same NamedDimDiGraph from different underlying structure - nddg2 = NamedDimDiGraph(DiGraph(4), dvs[[1, 4, 3, 2]]) - add_edge!(nddg2, ("X", 1) => ("X", 2)) - add_edge!(nddg2, ("X", 1) => ("Y", 1)) - add_edge!(nddg2, ("X", 2) => ("Y", 2)) - add_edge!(nddg2, ("Y", 1) => ("Y", 2)) - @test parent_graph(nddg1) != parent_graph(nddg2) - @test nddg1 == nddg2 - rem_edge!(nddg2, ("X", 1) => ("Y", 1)) - add_edge!(nddg2, ("Y", 1) => ("X", 1)) - @test nddg1 != nddg2 -end - -@testset "AbstractNamedGraph vertex renaming" begin - g = grid((2, 2)) - integer_names = collect(1:4) - string_names = ["A", "B", "C", "D"] - tuple_names = [("X", 1), ("X", 2), ("Y", 1), ("Y", 2)] - function_name = x -> reverse(x) - - # NamedGraph - ng = NamedGraph(g, string_names) - # rename to integers - vmap_int = Dictionary(vertices(ng), integer_names) - ng_int = rename_vertices(ng, vmap_int) - @test isa(ng_int, NamedGraph{Int}) - @test has_vertex(ng_int, 3) - @test has_edge(ng_int, 1 => 2) - @test has_edge(ng_int, 2 => 4) - # rename to tuples - vmap_tuple = Dictionary(vertices(ng), tuple_names) - ng_tuple = rename_vertices(ng, vmap_tuple) - @test isa(ng_tuple, NamedGraph{Tuple{String,Int}}) - @test has_vertex(ng_tuple, ("X", 1)) - @test has_edge(ng_tuple, ("X", 1) => ("X", 2)) - @test has_edge(ng_tuple, ("X", 2) => ("Y", 2)) - # rename with name map function - ng_function = rename_vertices(ng_tuple, function_name) - @test isa(ng_function, NamedGraph{Tuple{Int,String}}) - @test has_vertex(ng_function, (1, "X")) - @test has_edge(ng_function, (1, "X") => (2, "X")) - @test has_edge(ng_function, (2, "X") => (2, "Y")) - - # NamedDimGraph - ndg = named_grid((2, 2)) - # rename to integers - vmap_int = Dictionary(vertices(ndg), integer_names) - ndg_int = rename_vertices(ndg, vmap_int) - @test isa(ndg_int, NamedDimGraph{Tuple}) - @test has_vertex(ndg_int, (1,)) - @test has_edge(ndg_int, (1,) => (2,)) - @test has_edge(ndg_int, (2,) => (4,)) - # rename to strings - vmap_string = Dictionary(vertices(ndg), string_names) - ndg_string = rename_vertices(ndg, vmap_string) - @test isa(ndg_string, NamedDimGraph{Tuple}) - @test has_vertex(ndg_string, ("A",)) - @test has_edge(ndg_string, ("A",) => ("B",)) - @test has_edge(ndg_string, ("B",) => ("D",)) - # rename to strings - vmap_tuple = Dictionary(vertices(ndg), tuple_names) - ndg_tuple = rename_vertices(ndg, vmap_tuple) - @test isa(ndg_tuple, NamedDimGraph{Tuple}) - @test 
has_vertex(ndg_tuple, "X", 1) - @test has_edge(ndg_tuple, ("X", 1) => ("X", 2)) - @test has_edge(ndg_tuple, ("X", 2) => ("Y", 2)) - # rename with name map function - ndg_function = rename_vertices(ndg_tuple, function_name) - @test isa(ndg_function, NamedDimGraph{Tuple}) - @test has_vertex(ndg_function, 1, "X") - @test has_edge(ndg_function, (1, "X") => (2, "X")) - @test has_edge(ndg_function, (2, "X") => (2, "Y")) - - # NamedDimDiGraph - nddg = NamedDimDiGraph(DiGraph(collect(edges(g))), vertices(ndg)) - # rename to integers - vmap_int = Dictionary(vertices(nddg), integer_names) - nddg_int = rename_vertices(nddg, vmap_int) - @test isa(nddg_int, NamedDimDiGraph{Tuple}) - @test has_vertex(nddg_int, (1,)) - @test has_edge(nddg_int, (1,) => (2,)) - @test has_edge(nddg_int, (2,) => (4,)) - # rename to strings - vmap_string = Dictionary(vertices(nddg), string_names) - nddg_string = rename_vertices(nddg, vmap_string) - @test isa(nddg_string, NamedDimDiGraph{Tuple}) - @test has_vertex(nddg_string, ("A",)) - @test has_edge(nddg_string, ("A",) => ("B",)) - @test has_edge(nddg_string, ("B",) => ("D",)) - @test !has_edge(nddg_string, ("D",) => ("B",)) - # rename to strings - vmap_tuple = Dictionary(vertices(nddg), tuple_names) - nddg_tuple = rename_vertices(nddg, vmap_tuple) - @test isa(nddg_tuple, NamedDimDiGraph{Tuple}) - @test has_vertex(nddg_tuple, "X", 1) - @test has_edge(nddg_tuple, ("X", 1) => ("X", 2)) - @test !has_edge(nddg_tuple, ("Y", 2) => ("X", 2)) - # rename with name map function - nddg_function = rename_vertices(nddg_tuple, function_name) - @test isa(nddg_function, NamedDimDiGraph{Tuple}) - @test has_vertex(nddg_function, 1, "X") - @test has_edge(nddg_function, (1, "X") => (2, "X")) - @test has_edge(nddg_function, (2, "X") => (2, "Y")) -end diff --git a/test/test_opsum_to_ttno.jl b/test/test_opsum_to_ttno.jl index ade2ac6a..b340270d 100644 --- a/test/test_opsum_to_ttno.jl +++ b/test/test_opsum_to_ttno.jl @@ -2,7 +2,6 @@ using Test using ITensorNetworks using ITensors using Random -include("utils.jl") @testset "OpSum to TTNO" begin # small comb tree @@ -20,7 +19,7 @@ include("utils.jl") J1 = -1 J2 = 2 h = 0.5 - H = ising_graph(c; J1=J1, J2=J2, h=h) + H = ising(c; J1=J1, J2=J2, h=h) # add combination of longer range interactions Hlr = copy(H) diff --git a/test/test_ttno.jl b/test/test_ttno.jl index 80cf848f..bb65c6d1 100644 --- a/test/test_ttno.jl +++ b/test/test_ttno.jl @@ -12,7 +12,7 @@ using Random dmap = v -> rand(1:3) is = siteinds(dmap, c) # operator site inds - is_isp = merge(is, prime(is; links=[])) + is_isp = union_all_inds(is, prime(is; links=[])) # specify random linear vertex ordering of graph vertices vertex_order = shuffle(vertices(c)) diff --git a/test/utils.jl b/test/utils.jl deleted file mode 100644 index 1709f3a5..00000000 --- a/test/utils.jl +++ /dev/null @@ -1,81 +0,0 @@ -using Graphs: AbstractGraph, grid -using Dictionaries: AbstractDictionary - -_maybe_fill(x, n) = x -_maybe_fill(x::Number, n) = fill(x, n) - -""" -Random field J1-J2 Heisenberg model on a general graph -""" -function heisenberg_graph( - g::AbstractGraph; J1=1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0 -) - h = _maybe_fill(h, nv(g)) - H = OpSum() - if !iszero(J1) - for e in edges(g) - H += J1 / 2, "S+", src(e), "S-", dst(e) - H += J1 / 2, "S-", src(e), "S+", dst(e) - H += J1, "Sz", src(e), "Sz", dst(e) - end - end - if !iszero(J2) - # TODO, more clever way of looping over next to nearest neighbors? 
- for (i, v) in enumerate(vertices(g)) - nnn = [neighbors(g, n) for n in neighbors(g, v)] - nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) - nnn = setdiff(nnn, vertices(g)[1:i]) - for nn in nnn - H += J2 / 2, "S+", v, "S-", nn - H += J2 / 2, "S-", v, "S+", nn - H += J2, "Sz", v, "Sz", nn - end - end - end - for (i, v) in enumerate(vertices(g)) - if !iszero(h[i]) - H -= h[i], "Sz", v - end - end - return H -end - -""" -Next-to-nearest-neighbor Ising model (ZZX) on a general graph -""" -function ising_graph(g::AbstractGraph; J1=-1.0, J2=0.0, h::Union{<:Real,Vector{<:Real}}=0) - h = _maybe_fill(h, nv(g)) - H = OpSum() - if !iszero(J1) - for e in edges(g) - H += J1, "Z", src(e), "Z", dst(e) - end - end - if !iszero(J2) - # TODO, more clever way of looping over next to nearest neighbors? - for (i, v) in enumerate(vertices(g)) - nnn = [neighbors(g, n) for n in neighbors(g, v)] - nnn = setdiff(Base.Iterators.flatten(nnn), neighbors(g, v)) - nnn = setdiff(nnn, vertices(g)[1:i]) - for nn in nnn - H += J2, "Z", v, "Z", nn - end - end - end - for (i, v) in enumerate(vertices(g)) - if !iszero(h[i]) - H += h[i], "X", v - end - end - return H -end - -""" -Random field J1-J2 Heisenberg model on a chain of length N -""" -heisenberg(N; kwargs...) = heisenberg_graph(grid((N,)); kwargs...) - -""" -Next-to-nearest-neighbor Ising model (ZZX) on a chain of length N -""" -ising(N; kwargs...) = ising(grid((N,)); kwargs...) From 0eb9ef98ab7d758117c3918452c9fc8831b4e914 Mon Sep 17 00:00:00 2001 From: leburgel Date: Fri, 4 Nov 2022 14:46:49 -0700 Subject: [PATCH 04/13] Remove temporary `expect` in favor or `expect.jl` merged from main Add support for choosing a subset of sites in `expect`, similar to MPS implementation Condition expectation value type on `leaf_eltype` of state --- src/ITensorNetworks.jl | 2 +- src/expect.jl | 7 +++++-- src/imports.jl | 4 ---- src/treetensornetwork/ttno.jl | 2 +- src/treetensornetwork/ttns.jl | 19 ------------------- 5 files changed, 7 insertions(+), 27 deletions(-) diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 2659ada8..33d27205 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -17,7 +17,7 @@ using StaticArrays using Suppressor # TODO: export from ITensors -using ITensors: commontags, @Algorithm_str, Algorithm +using ITensors: commontags, @Algorithm_str, Algorithm, OneITensor using Graphs: AbstractEdge, AbstractGraph, Graph, add_edge! using MultiDimDictionaries: IndexType, SliceIndex diff --git a/src/expect.jl b/src/expect.jl index 2bbdf6bb..ca6e65c1 100644 --- a/src/expect.jl +++ b/src/expect.jl @@ -5,14 +5,17 @@ function ITensors.expect( maxdim=nothing, ortho=false, sequence=nothing, + sites=vertices(ψ), ) s = siteinds(ψ) - res = Dictionary(vertices(ψ), Vector{Float64}(undef, nv(ψ))) + ElT = promote_itensor_eltype(ψ) + # ElT = ishermitian(ITensors.op(op, s[sites[1]])) ? 
real(ElT) : ElT + res = Dictionary(sites, Vector{ElT}(undef, length(sites))) if isnothing(sequence) sequence = contraction_sequence(flattened_inner_network(ψ, ψ)) end normψ² = norm2(ψ; sequence) - for v in vertices(ψ) + for v in sites O = ITensor(Op(op, v), s) Oψ = apply(O, ψ; cutoff, maxdim, ortho) res[v] = contract_inner(ψ, Oψ; sequence) / normψ² diff --git a/src/imports.jl b/src/imports.jl index 41073d15..afa8986e 100644 --- a/src/imports.jl +++ b/src/imports.jl @@ -29,7 +29,6 @@ import ITensors: loginner, norm, lognorm, - expect, # truncation truncate!, truncate, @@ -74,9 +73,6 @@ import ITensors: nsite, # promotion and conversion promote_itensor_eltype, - scalartype, - # promotion and conversion - promote_itensor_eltype, scalartype using ITensors.ContractionSequenceOptimization: deepmap diff --git a/src/treetensornetwork/ttno.jl b/src/treetensornetwork/ttno.jl index f8029fea..e14cee5a 100644 --- a/src/treetensornetwork/ttno.jl +++ b/src/treetensornetwork/ttno.jl @@ -49,7 +49,7 @@ function TreeTensorNetworkOperator( end # -# Expectation values +# Inner products # # TODO: implement using multi-graph disjoint union diff --git a/src/treetensornetwork/ttns.jl b/src/treetensornetwork/ttns.jl index 0068054c..d4fbb552 100644 --- a/src/treetensornetwork/ttns.jl +++ b/src/treetensornetwork/ttns.jl @@ -107,22 +107,3 @@ end function replacebond(T0::TTNS, args...; kwargs...) return replacebond!(copy(T0), args...; kwargs...) end - -# -# Expectation values -# - -# TODO: temporary patch, to be implemented properly -function expect(psi::TTNS, opname::String; kwargs...) - s = siteinds(psi) - sites = get(kwargs, :sites, vertices(psi)) - res = Dictionary(sites, Vector{ComplexF64}(undef, length(sites))) - norm2_psi = inner(psi, psi) - for v in sites - Opsi = copy(psi) - Opsi[v] *= op(opname, s[v]) - noprime!(Opsi[v]) - res[v] = inner(psi, Opsi) / norm2_psi - end - return res -end From d337f7c33c6ae06c9efbfcdb191bc0a302a94640 Mon Sep 17 00:00:00 2001 From: leburgel Date: Mon, 28 Nov 2022 13:03:19 +0100 Subject: [PATCH 05/13] Use `convert_eltype` instead of `convert_leaf_eltype` where appropriate, add some tests --- src/abstractitensornetwork.jl | 2 +- src/itensornetwork.jl | 2 +- test/test_itensornetwork.jl | 42 +++++++++++++++++++++++++++++++---- 3 files changed, 40 insertions(+), 6 deletions(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 1ada1d25..5a3ed64c 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -110,7 +110,7 @@ ITensors.scalartype(tn::AbstractITensorNetwork) = LinearAlgebra.promote_leaf_elt # TODO: mimic ITensors.AbstractMPS implementation using map function ITensors.convert_leaf_eltype(eltype::Type, tn::AbstractITensorNetwork) tn = copy(tn) - vertex_data(tn) .= ITensors.convert_leaf_eltype.(Ref(eltype), vertex_data(tn)) + vertex_data(tn) .= convert_eltype.(Ref(eltype), vertex_data(tn)) return tn end diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index 493006b0..a7255a6e 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -107,7 +107,7 @@ function ITensorNetwork( ) where {ElT<:Number} ψ = ITensorNetwork(is) for v in vertices(ψ) - ψ[v] = ITensors.convert_leaf_eltype(ElT, state(only(is[v]), states_map[v])) + ψ[v] = convert_eltype(ElT, state(only(is[v]), states_map[v])) end ψ = insert_links(ψ, edges(is)) return ψ diff --git a/test/test_itensornetwork.jl b/test/test_itensornetwork.jl index 9aa9a045..ee5589b7 100644 --- a/test/test_itensornetwork.jl +++ b/test/test_itensornetwork.jl @@ -92,11 +92,45 @@ 
using Test @test linkinds(only, ψ, e) == only(commoninds(ψ[1, 1], ψ[2, 1])) end - @testset "ElType in constructors" begin - # TODO + @testset "ElType in constructors, $ElT" for ElT in (Float32, Float64, ComplexF64) + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + ψ = ITensorNetwork(ElT, s; link_space=2) + @test ITensors.scalartype(ψ) == ElT + end + + @testset "ElType conversion, $new_eltype" for new_eltype in (Float32, ComplexF64) + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + ψ = randomITensorNetwork(s; link_space=2) + @test ITensors.scalartype(ψ) == Float64 + + ϕ = ITensors.convert_leaf_eltype(new_eltype, ψ) + @test ITensors.scalartype(ϕ) == new_eltype end - @testset "Construction from state (function)" begin - # TODO + @testset "Construction from state map" for ElT in (Float32, ComplexF64) + dims = (2, 2) + g = named_grid(dims) + s = siteinds("S=1/2", g) + state_map(v::Tuple) = iseven(sum(isodd.(v))) ? "↑" : "↓" + + ψ = ITensorNetwork(s, state_map) + t = ψ[2, 2] + si = siteinds(only, ψ, 2, 2) + bi = map(e -> linkinds(only, ψ, e), incident_edges(ψ, 2, 2)) + @test eltype(t) == Float64 + @test abs(t[si => "↑", [b => end for b in bi]...]) == 1.0 # insert_links introduces extra signs through factorization... + @test t[si => "↓", [b => end for b in bi]...] == 0.0 + + ϕ = ITensorNetwork(ElT, s, state_map) + t = ϕ[2, 2] + si = siteinds(only, ϕ, 2, 2) + bi = map(e -> linkinds(only, ϕ, e), incident_edges(ϕ, 2, 2)) + @test eltype(t) == ElT + @test abs(t[si => "↑", [b => end for b in bi]...]) == convert(ElT, 1.0) # insert_links introduces extra signs through factorization... + @test t[si => "↓", [b => end for b in bi]...] == convert(ElT, 0.0) end end From 77be11cdca2d09acd2e15a1d9f052b58b799c75f Mon Sep 17 00:00:00 2001 From: leburgel Date: Mon, 28 Nov 2022 15:29:36 +0100 Subject: [PATCH 06/13] Change `linkdims` to return `NamedDimDataGraph` --- src/abstractitensornetwork.jl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 5a3ed64c..4d87497e 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -629,7 +629,11 @@ function linkdim(tn::AbstractITensorNetwork, edge::AbstractEdge) end function linkdims(tn::AbstractITensorNetwork) - return Dictionary(edges(tn), map(e -> linkdim(tn, e), edges(tn))) + ld = NamedDimDataGraph{Int,Int}(copy(underlying_graph(tn))) + for e in edges(ld) + ld[e] = linkdim(tn, e) + end + return ld end # From 40a9975f54e5ee246fb1a66a7c81565451125a1a Mon Sep 17 00:00:00 2001 From: leburgel Date: Wed, 21 Dec 2022 18:00:41 +0100 Subject: [PATCH 07/13] Move implementation to `loginner`, make `logdot` the alias. --- .../abstracttreetensornetwork.jl | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/treetensornetwork/abstracttreetensornetwork.jl b/src/treetensornetwork/abstracttreetensornetwork.jl index efd60c82..ca7257e0 100644 --- a/src/treetensornetwork/abstracttreetensornetwork.jl +++ b/src/treetensornetwork/abstracttreetensornetwork.jl @@ -1,7 +1,9 @@ # TODO: Replace `AbstractITensorNetwork` with a trait `IsTree`. 
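# (A rough sketch of what such a trait could look like; the names `IsTree`, `NotTree` and
#  `tree_trait` are hypothetical and not implemented in this patch:
#      struct IsTree end
#      struct NotTree end
#      tree_trait(tn::AbstractITensorNetwork) = is_tree(underlying_graph(tn)) ? IsTree() : NotTree()
#  Tree-specific methods could then dispatch on `tree_trait(tn)` for any network whose underlying
#  graph is a tree, instead of requiring the dedicated abstract subtype defined below.)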
abstract type AbstractTreeTensorNetwork{V} <: AbstractITensorNetwork{V} end -underlying_graph_type(G::Type{<:AbstractTreeTensorNetwork}) = underlying_graph_type(data_graph_type(G)) +function underlying_graph_type(G::Type{<:AbstractTreeTensorNetwork}) + return underlying_graph_type(data_graph_type(G)) +end # # Field access @@ -21,7 +23,9 @@ end isortho(ψ::AbstractTreeTensorNetwork) = isone(length(ortho_center(ψ))) -function set_ortho_center(ψ::AbstractTreeTensorNetwork{V}, new_center::Vector{<:V}) where {V} +function set_ortho_center( + ψ::AbstractTreeTensorNetwork{V}, new_center::Vector{<:V} +) where {V} return typeof(ψ)(itensor_network(ψ), new_center) end @@ -220,7 +224,7 @@ function lognorm(ψ::AbstractTreeTensorNetwork) if isortho(ψ) return log(norm(ψ[only(ortho_center(ψ))])) end - lognorm2_ψ = logdot(ψ, ψ) + lognorm2_ψ = loginner(ψ, ψ) rtol = eps(real(scalartype(ψ))) * 10 atol = rtol if !IsApprox.isreal(lognorm2_ψ, Approx(; rtol=rtol, atol=atol)) @@ -229,12 +233,12 @@ function lognorm(ψ::AbstractTreeTensorNetwork) return 0.5 * real(lognorm2_ψ) end -function loginner(ψ1::TTNT, ψ2::TTNT; kwargs...) where {TTNT<:AbstractTreeTensorNetwork} - return logdot(ψ1, ψ2; kwargs...) +function logdot(ψ1::TTNT, ψ2::TTNT; kwargs...) where {TTNT<:AbstractTreeTensorNetwork} + return loginner(ψ1, ψ2; kwargs...) end # TODO: stick with this traversal or find optimal contraction sequence? -function logdot( +function loginner( ψ1::TTNT, ψ2::TTNT; root_vertex=default_root_vertex(ψ1, ψ2) )::Number where {TTNT<:AbstractTreeTensorNetwork} N = nv(ψ1) From a04d3cf93aef785591166639d7c0e247f511a9e2 Mon Sep 17 00:00:00 2001 From: leburgel Date: Thu, 22 Dec 2022 14:58:46 +0100 Subject: [PATCH 08/13] No more mutables; remove many in-place operations. --- src/expect.jl | 2 +- src/exports.jl | 4 +- src/imports.jl | 2 - src/itensornetwork.jl | 49 ++++++++++++------- src/treetensornetwork/abstractprojttno.jl | 10 ++-- .../abstracttreetensornetwork.jl | 4 +- src/treetensornetwork/opsum_to_ttno.jl | 44 ++++++++--------- src/treetensornetwork/projttno.jl | 11 +++-- src/treetensornetwork/projttnosum.jl | 17 +++---- src/treetensornetwork/ttno.jl | 22 ++++----- src/treetensornetwork/ttns.jl | 21 ++++---- 11 files changed, 99 insertions(+), 87 deletions(-) diff --git a/src/expect.jl b/src/expect.jl index addbfd9a..b25fc937 100644 --- a/src/expect.jl +++ b/src/expect.jl @@ -15,7 +15,7 @@ function expect( sequence = contraction_sequence(inner_network(ψ, ψ; flatten=true)) end normψ² = norm_sqr(ψ; sequence) - for v in vertices(ψ) + for v in sites O = ITensor(Op(op, v), s) Oψ = apply(O, ψ; cutoff, maxdim, ortho) res[v] = contract_inner(ψ, Oψ; sequence) / normψ² diff --git a/src/exports.jl b/src/exports.jl index f40af2bc..c427f75f 100644 --- a/src/exports.jl +++ b/src/exports.jl @@ -28,7 +28,7 @@ export grid, # NamedGraphs # -export named_binary_tree, +export named_binary_tree, named_grid, is_tree, parent_vertex, @@ -82,6 +82,8 @@ export AbstractITensorNetwork, TTNO, ProjTTNO, ProjTTNOSum, + set_nsite, + position, finite_state_machine, # contraction_sequences.jl contraction_sequence, diff --git a/src/imports.jl b/src/imports.jl index 0f026858..c2b3ed8d 100644 --- a/src/imports.jl +++ b/src/imports.jl @@ -79,8 +79,6 @@ import ITensors: linkdims, maxlinkdim, # projected operators - position!, - set_nsite!, product, nsite, # promotion and conversion diff --git a/src/itensornetwork.jl b/src/itensornetwork.jl index ec375e6f..89f03ceb 100644 --- a/src/itensornetwork.jl +++ b/src/itensornetwork.jl @@ -17,10 +17,16 @@ end 
data_graph(tn::ITensorNetwork) = getfield(tn, :data_graph) data_graph_type(TN::Type{<:ITensorNetwork}) = fieldtype(TN, :data_graph) -underlying_graph_type(TN::Type{<:ITensorNetwork}) = fieldtype(data_graph_type(TN), :underlying_graph) +function underlying_graph_type(TN::Type{<:ITensorNetwork}) + return fieldtype(data_graph_type(TN), :underlying_graph) +end -ITensorNetwork{V}(data_graph::DataGraph{V}) where {V} = ITensorNetwork{V}(Private(), copy(data_graph)) -ITensorNetwork{V}(data_graph::DataGraph) where {V} = ITensorNetwork{V}(Private(), DataGraph{V}(data_graph)) +function ITensorNetwork{V}(data_graph::DataGraph{V}) where {V} + return ITensorNetwork{V}(Private(), copy(data_graph)) +end +function ITensorNetwork{V}(data_graph::DataGraph) where {V} + return ITensorNetwork{V}(Private(), DataGraph{V}(data_graph)) +end ITensorNetwork(data_graph::DataGraph) = ITensorNetwork{vertextype(data_graph)}(data_graph) @@ -65,23 +71,25 @@ end # # catch-all for default ElType -function ITensorNetwork{V}(g::AbstractGraph, args...; kwargs...) where {V} - return ITensorNetwork{V}(Float64, g, args...; kwargs...) +function (::Type{ITNT})(g::AbstractGraph, args...; kwargs...) where {ITNT<:ITensorNetwork} + return ITNT(Float64, g, args...; kwargs...) end -function ITensorNetwork(g::AbstractGraph, args...; kwargs...) - return ITensorNetwork(Float64, g, args...; kwargs...) -end - -function ITensorNetwork{V}(::Type{ElT}, g::AbstractNamedGraph; kwargs...) where {V,ElT<:Number} +function ITensorNetwork{V}( + ::Type{ElT}, g::AbstractNamedGraph; kwargs... +) where {V,ElT<:Number} return ITensorNetwork{V}(ElT, IndsNetwork{V}(g; kwargs...)) end -function ITensorNetwork(::Type{ElT}, graph::AbstractNamedGraph; kwargs...) where {ElT<:Number} +function ITensorNetwork( + ::Type{ElT}, graph::AbstractNamedGraph; kwargs... +) where {ElT<:Number} return ITensorNetwork{vertextype(graph)}(ElT, graph; kwargs...) end -function ITensorNetwork(::Type{ElT}, g::Graphs.SimpleGraphs.AbstractSimpleGraph; kwargs...) where {ElT<:Number} +function ITensorNetwork( + ::Type{ElT}, g::Graphs.SimpleGraphs.AbstractSimpleGraph; kwargs... +) where {ElT<:Number} return ITensorNetwork(ElT, IndsNetwork(g; kwargs...)) end @@ -89,7 +97,9 @@ end # Construction from IndsNetwork # -function ITensorNetwork{V}(::Type{ElT}, inds_network::IndsNetwork; kwargs...) where {V,ElT<:Number} +function ITensorNetwork{V}( + ::Type{ElT}, inds_network::IndsNetwork; kwargs... +) where {V,ElT<:Number} # Graphs.jl uses `zero` to create a graph of the same type # without any vertices or edges. inds_network_merge = typeof(inds_network)(underlying_graph(inds_network); kwargs...) @@ -103,13 +113,18 @@ function ITensorNetwork{V}(::Type{ElT}, inds_network::IndsNetwork; kwargs...) wh end for v in vertices(tn) siteinds = get(inds_network, v, indtype(inds_network)[]) - linkinds = [get(inds_network, edgetype(inds_network)(v, nv), indtype(inds_network)[]) for nv in neighbors(inds_network, v)] + linkinds = [ + get(inds_network, edgetype(inds_network)(v, nv), indtype(inds_network)[]) for + nv in neighbors(inds_network, v) + ] setindex_preserve_graph!(tn, ITensor(ElT, siteinds, linkinds...), v) end return tn end -function ITensorNetwork(::Type{ElT}, inds_network::IndsNetwork; kwargs...) where {ElT<:Number} +function ITensorNetwork( + ::Type{ElT}, inds_network::IndsNetwork; kwargs... +) where {ElT<:Number} return ITensorNetwork{vertextype(inds_network)}(ElT, inds_network; kwargs...) 
end @@ -128,9 +143,7 @@ function insert_links(ψ::ITensorNetwork, edges::Vector=edges(ψ); cutoff=1e-15) return ψ end -function ITensorNetwork( - ::Type{ElT}, is::IndsNetwork, states_map -) where {ElT<:Number} +function ITensorNetwork(::Type{ElT}, is::IndsNetwork, states_map) where {ElT<:Number} ψ = ITensorNetwork(is) for v in vertices(ψ) ψ[v] = convert_eltype(ElT, state(only(is[v]), states_map[v])) diff --git a/src/treetensornetwork/abstractprojttno.jl b/src/treetensornetwork/abstractprojttno.jl index 84d8785d..8ca288a4 100644 --- a/src/treetensornetwork/abstractprojttno.jl +++ b/src/treetensornetwork/abstractprojttno.jl @@ -2,7 +2,10 @@ abstract type AbstractProjTTNO{V} end copy(::AbstractProjTTNO) = error("Not implemented") -set_nsite!(::AbstractProjTTNO, nsite) = error("Not implemented") +set_nsite(::AbstractProjTTNO, nsite) = error("Not implemented") + +# silly constructor wrapper +shift_position(::AbstractProjTTNO, pos) = error("Not implemented") make_environment!(::AbstractProjTTNO, psi, e) = error("Not implemented") @@ -39,6 +42,7 @@ function internal_edges(P::AbstractProjTTNO{V})::Vector{NamedEdge{V}} where {V} return collect(Base.Iterators.flatten(edges)) end +environment(P::AbstractProjTTNO, edge::Pair) = environment(P, edgetype(P)(edge)) function environment(P::AbstractProjTTNO{V}, edge::NamedEdge{V})::ITensor where {V} return P.environments[edge] end @@ -124,11 +128,11 @@ function Base.size(P::AbstractProjTTNO)::Tuple{Int,Int} return (d, d) end -function position!( +function position( P::AbstractProjTTNO{V}, psi::TTNS{V}, pos::Union{Vector{<:V},NamedEdge{V}} ) where {V} # shift position; TODO: update for immutable struct - P.pos = pos + P = shift_position(P, pos) # invalidate environments corresponding to internal edges for e in internal_edges(P) unset!(P.environments, e) diff --git a/src/treetensornetwork/abstracttreetensornetwork.jl b/src/treetensornetwork/abstracttreetensornetwork.jl index ca7257e0..963d4b5f 100644 --- a/src/treetensornetwork/abstracttreetensornetwork.jl +++ b/src/treetensornetwork/abstracttreetensornetwork.jl @@ -124,7 +124,7 @@ function truncate( end # For ambiguity error -function truncate(ψ::AbstractTreeTensorNetwork, edge::AbstractEdge; kwargs...) +function truncate(tn::AbstractTreeTensorNetwork, edge::AbstractEdge; kwargs...) return typeof(tn)(truncate(ITensorNetwork(tn), edge; kwargs...)) end @@ -341,7 +341,7 @@ end function permute( ψ::TTNT, ::Tuple{typeof(linkind),typeof(siteinds),typeof(linkind)} )::TTNT where {TTNT<:AbstractTreeTensorNetwork} - ψ̃ = TTNT(underlying_graph(ψ)) + ψ̃ = copy(ψ) for v in vertices(ψ) ls = [only(linkinds(ψ, n => v)) for n in neighbors(ψ, v)] # TODO: won't work for multiple indices per link... ss = sort(Tuple(siteinds(ψ, v)); by=plev) diff --git a/src/treetensornetwork/opsum_to_ttno.jl b/src/treetensornetwork/opsum_to_ttno.jl index 163f51be..f7366a0a 100644 --- a/src/treetensornetwork/opsum_to_ttno.jl +++ b/src/treetensornetwork/opsum_to_ttno.jl @@ -44,15 +44,15 @@ Base.zero(t::Scaled) = zero(typeof(t)) # """ - finite_state_machine(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple) where {C} + finite_state_machine(os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::V) where {C,V} Finite state machine generator for ITensors.OpSum Hamiltonian defined on a tree graph. The site Index graph must be a tree graph, and the chosen root must be a leaf vertex of this tree. 
Returns a DataGraph of SparseArrayKit.SparseArrays """ function finite_state_machine( - os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::Tuple -) where {V,C} + os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::V +) where {C,V} os = deepcopy(os) os = sorteachterm(os, sites, root_vertex) os = ITensors.sortmergeterms(os) @@ -60,9 +60,7 @@ function finite_state_machine( ValType = ITensors.determineValType(ITensors.terms(os)) # sparse symbolic representation of the TTNO Hamiltonian as a DataGraph of SparseArrays - sparseTTNO = DataGraph{V,SparseArray{Scaled{ValType,Prod{Op}}}}( - underlying_graph(sites) - ) + sparseTTNO = DataGraph{V,SparseArray{Scaled{ValType,Prod{Op}}}}(underlying_graph(sites)) # some things to keep track of vs = post_order_dfs_vertices(sites, root_vertex) # store vertices in fixed ordering relative to root @@ -119,7 +117,7 @@ function finite_state_machine( # if term starts at this site, add its coefficient as a site factor site_coef = one(C) if (isempty(dims_in) || all(T_inds[dims_in] .== -1)) && - ITensors.argument(term) ∉ site_coef_done + ITensors.argument(term) ∉ site_coef_done site_coef = ITensors.coefficient(term) push!(site_coef_done, ITensors.argument(term)) end @@ -177,14 +175,14 @@ function finite_state_machine( end """ - fsmTTNO(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple, kwargs...) where {C} + fsmTTNO(os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::V, kwargs...) where {C,V} Construct a dense TreeTensorNetworkOperator from sparse finite state machine represenatation, without compression. """ function fsmTTNO( - os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::Tuple -)::TTNO where {V,C} + os::OpSum{C}, sites::IndsNetwork{V,<:Index}, root_vertex::V +)::TTNO where {C,V} ValType = ITensors.determineValType(ITensors.terms(os)) # start from finite state machine fsm, edge_orders = finite_state_machine(os, sites, root_vertex) @@ -217,14 +215,14 @@ end # this is broken for certain combinations of longer-range interactions, no idea why... """ - svdTTNO(os::OpSum{C}, sites::IndsNetwork{<:Index}, root_vertex::Tuple, kwargs...) where {C} + svdTTNO(os::OpSum{C}, sites::IndsNetwork{V<:Index}, root_vertex::V, kwargs...) where {C,V} Construct a dense TreeTensorNetworkOperator from a symbolic OpSum representation of a -Hamiltonian, compressin shared interaction channels. +Hamiltonian, compressing shared interaction channels. """ function svdTTNO( - os::OpSum{C}, sites::IndsNetwork{VT,<:Index}, root_vertex::Tuple; kwargs... -)::TTNO where {VT,C} + os::OpSum{C}, sites::IndsNetwork{VT,<:Index}, root_vertex::VT; kwargs... 
+)::TTNO where {C,VT} mindim::Int = get(kwargs, :mindim, 1) maxdim::Int = get(kwargs, :maxdim, 10000) cutoff::Float64 = get(kwargs, :cutoff, 1E-15) @@ -234,7 +232,7 @@ function svdTTNO( # some things to keep track of vs = post_order_dfs_vertices(sites, root_vertex) # store vertices in fixed ordering relative to root es = post_order_dfs_edges(sites, root_vertex) # store edges in fixed ordering relative to root - ranks = Dict(v => degree(sites, v) for v in vs) # rank of every TTNO tensor in network + ranks = Dict(v => degree(sites, v) for v in vs) # rank of every TTNO tensor in network Vs = Dict(e => Matrix{ValType}(undef, 1, 1) for e in es) # link isometries for SVD compression of TTNO leftmaps = Dict(e => Dict{Vector{Op},Int}() for e in es) # map from term in Hamiltonian to edge left channel index for every edge rightmaps = Dict(e => Dict{Vector{Op},Int}() for e in es) # map from term in Hamiltonian to edge right channel index for every edge @@ -306,7 +304,7 @@ function svdTTNO( # if term starts at this site, add its coefficient as a site factor site_coef = one(C) if (isempty(dims_in) || all(T_inds[dims_in] .== -1)) && - ITensors.argument(term) ∉ site_coef_done + ITensors.argument(term) ∉ site_coef_done site_coef = ITensors.coefficient(term) push!(site_coef_done, ITensors.argument(term)) end @@ -339,9 +337,9 @@ function svdTTNO( # compress this tempTTNO representation into dense form - link_space = dictionary([ - e => Index((isempty(rightmaps[e]) ? 0 : size(Vs[e], 2)) + 2, edge_tag(e)) for e in es - ]) + link_space = dictionary( + [e => Index((isempty(rightmaps[e]) ? 0 : size(Vs[e], 2)) + 2, edge_tag(e)) for e in es] + ) H = TTNO(sites) @@ -430,7 +428,7 @@ function isfermionic(t::Vector{Op}, sites::IndsNetwork{V,<:Index}) where {V} return (p == -1) end -# only(site(ops[1])) in ITensors breaks for tuple site labels, had to drop the only +# only(site(ops[1])) in ITensors breaks for Tuple site labels, had to drop the only function computeSiteProd(sites::IndsNetwork{V,<:Index}, ops::Prod{Op})::ITensor where {V} v = ITensors.site(ops[1]) T = op(sites[v], ITensors.which_op(ops[1]); ITensors.params(ops[1])...) @@ -443,7 +441,9 @@ function computeSiteProd(sites::IndsNetwork{V,<:Index}, ops::Prod{Op})::ITensor end # changed `isless_site` to use tree vertex ordering relative to root -function sorteachterm(os::OpSum, sites::IndsNetwork{V,<:Index}, root_vertex::Tuple) where {V} +function sorteachterm( + os::OpSum, sites::IndsNetwork{V,<:Index}, root_vertex::V +) where {V} os = copy(os) findpos(op::Op) = find_index_in_tree(op, sites, root_vertex) isless_site(o1::Op, o2::Op) = findpos(o1) < findpos(o2) @@ -519,7 +519,7 @@ Convert an OpSum object `os` to a TreeTensorNetworkOperator, with indices given function TTNO( os::OpSum, sites::IndsNetwork{V,<:Index}; - root_vertex::Tuple=default_root_vertex(sites), + root_vertex::V=default_root_vertex(sites), splitblocks=false, method::Symbol=:fsm, # default to construction from finite state machine until svdTTNO is fixed trunc=false, diff --git a/src/treetensornetwork/projttno.jl b/src/treetensornetwork/projttno.jl index 477f551a..c9a4d876 100644 --- a/src/treetensornetwork/projttno.jl +++ b/src/treetensornetwork/projttno.jl @@ -1,13 +1,10 @@ """ ProjTTNO """ -mutable struct ProjTTNO{V} <: AbstractProjTTNO{V} +struct ProjTTNO{V} <: AbstractProjTTNO{V} pos::Union{Vector{<:V},NamedEdge{V}} # TODO: cleanest way to specify effective Hamiltonian position? 
H::TTNO{V} environments::Dictionary{NamedEdge{V},ITensor} - function ProjTTNO(pos::Union{Vector{<:V},NamedEdge{V}}, H::TTNO{V}, environments::Dictionary{NamedEdge{V},ITensor}) where {V} - return new{V}(pos, H, environments) - end end function ProjTTNO(H::TTNO) @@ -18,10 +15,14 @@ copy(P::ProjTTNO) = ProjTTNO(P.pos, copy(P.H), copy(P.environments)) # trivial if we choose to specify position as above; only kept to allow using alongside # ProjMPO -function set_nsite!(P::ProjTTNO, nsite) +function set_nsite(P::ProjTTNO, nsite) return P end +function shift_position(P::ProjTTNO, pos) + return ProjTTNO(pos, P.H, P.environments) +end + function make_environment!(P::ProjTTNO{V}, psi::TTNS{V}, e::NamedEdge{V})::ITensor where {V} # invalidate environment for opposite edge direction if necessary reverse(e) ∈ incident_edges(P) || unset!(P.environments, reverse(e)) diff --git a/src/treetensornetwork/projttnosum.jl b/src/treetensornetwork/projttnosum.jl index 30835ecb..c06f318e 100644 --- a/src/treetensornetwork/projttnosum.jl +++ b/src/treetensornetwork/projttnosum.jl @@ -1,11 +1,11 @@ """ ProjTTNOSum """ -mutable struct ProjTTNOSum{V} +struct ProjTTNOSum{V} pm::Vector{ProjTTNO{V}} function ProjTTNOSum(pm::Vector{ProjTTNO{V}}) where {V} return new{V}(pm) - end + end end copy(P::ProjTTNOSum) = ProjTTNOSum(copy.(P.pm)) @@ -18,11 +18,8 @@ on_edge(P::ProjTTNOSum) = on_edge(P.pm[1]) nsite(P::ProjTTNOSum) = nsite(P.pm[1]) -function set_nsite!(Ps::ProjTTNOSum, nsite) - for P in Ps.pm - set_nsite!(P, nsite) - end - return Ps +function set_nsite(Ps::ProjTTNOSum, nsite) + return ProjTTNOSum(map(M -> set_nsite(M, nsite), Ps.pm)) end underlying_graph(P::ProjTTNOSum) = underlying_graph(P.pm[1]) @@ -55,10 +52,8 @@ end Base.size(P::ProjTTNOSum) = size(P.pm[1]) -function position!( +function position( P::ProjTTNOSum{V}, psi::TTNS{V}, pos::Union{Vector{<:V},NamedEdge{V}} ) where {V} - for M in P.pm - position!(M, psi, pos) - end + ProjTTNOSum(map(M -> position(M, psi, pos), P.pm)) end diff --git a/src/treetensornetwork/ttno.jl b/src/treetensornetwork/ttno.jl index 59130828..29f5b498 100644 --- a/src/treetensornetwork/ttno.jl +++ b/src/treetensornetwork/ttno.jl @@ -20,7 +20,9 @@ struct TreeTensorNetworkOperator{V} <: AbstractTreeTensorNetwork{V} end end -data_graph_type(G::Type{<:TreeTensorNetworkOperator}) = data_graph_type(fieldtype(G, :itensor_network)) +function data_graph_type(G::Type{<:TreeTensorNetworkOperator}) + return data_graph_type(fieldtype(G, :itensor_network)) +end function copy(ψ::TreeTensorNetworkOperator) return TreeTensorNetworkOperator(copy(ψ.itensor_network), copy(ψ.ortho_center)) @@ -32,7 +34,7 @@ const TTNO = TreeTensorNetworkOperator itensor_network(ψ::TreeTensorNetworkOperator) = getfield(ψ, :itensor_network) # Required for `AbstractITensorNetwork` interface -data_graph(ψ::TreeTensorNetworkOperator) = data_graph(ITensorNetwork(ψ)) +data_graph(ψ::TreeTensorNetworkOperator) = data_graph(itensor_network(ψ)) # # Constructor @@ -41,13 +43,11 @@ data_graph(ψ::TreeTensorNetworkOperator) = data_graph(ITensorNetwork(ψ)) TreeTensorNetworkOperator(tn::ITensorNetwork, args...) = TreeTensorNetworkOperator{vertextype(tn)}(tn, args...) # catch-all for default ElType -function TreeTensorNetworkOperator(graph::AbstractGraph, args...; kwargs...) - return TreeTensorNetworkOperator(Float64, graph, args...; kwargs...) +function (::Type{TTNT})(g::AbstractGraph, args...; kwargs...) where {TTNT<:TTNO} + return TTNT(Float64, g, args...; kwargs...) 
end -function TreeTensorNetworkOperator( - ::Type{ElT}, graph::AbstractGraph, args...; kwargs... -) where {ElT<:Number} +function TreeTensorNetworkOperator(::Type{ElT}, graph::AbstractGraph, args...; kwargs...) where {ElT<:Number} itensor_network = ITensorNetwork(ElT, graph; kwargs...) return TreeTensorNetworkOperator(itensor_network, args...) end @@ -143,10 +143,10 @@ end # Conversion # -function convert(::Type{TTNS}, T::TTNO) - return TTNS(ITensorNetwork(T), ortho_center(T)) +function convert(::Type{<:TTNS}, T::TTNO) + return TTNS(itensor_network(T), ortho_center(T)) end -function convert(::Type{TTNO}, T::TTNS) - return TTNO(ITensorNetwork(T), ortho_center(T)) +function convert(::Type{<:TTNO}, T::TTNS) + return TTNO(itensor_network(T), ortho_center(T)) end diff --git a/src/treetensornetwork/ttns.jl b/src/treetensornetwork/ttns.jl index 59f9e8e3..a8ac1723 100644 --- a/src/treetensornetwork/ttns.jl +++ b/src/treetensornetwork/ttns.jl @@ -1,5 +1,5 @@ """ - TreeTensorNetworkState <: AbstractITensorNetwork + TreeTensorNetworkState{V} <: AbstractITensorNetwork{V} # Fields @@ -18,7 +18,9 @@ struct TreeTensorNetworkState{V} <: AbstractTreeTensorNetwork{V} end end -data_graph_type(G::Type{<:TreeTensorNetworkState}) = data_graph_type(fieldtype(G, :itensor_network)) +function data_graph_type(G::Type{<:TreeTensorNetworkState}) + return data_graph_type(fieldtype(G, :itensor_network)) +end function copy(ψ::TreeTensorNetworkState) return TreeTensorNetworkState(copy(ψ.itensor_network), copy(ψ.ortho_center)) @@ -30,23 +32,20 @@ const TTNS = TreeTensorNetworkState itensor_network(ψ::TreeTensorNetworkState) = getfield(ψ, :itensor_network) # Required for `AbstractITensorNetwork` interface -data_graph(ψ::TreeTensorNetworkState) = data_graph(ITensorNetwork(ψ)) +data_graph(ψ::TreeTensorNetworkState) = data_graph(itensor_network(ψ)) # # Constructor # +TreeTensorNetworkState(tn::ITensorNetwork, args...) = TreeTensorNetworkState{vertextype(tn)}(tn, args...) + # catch-all for default ElType -function TreeTensorNetworkState(g::AbstractGraph, args...; kwargs...) - return TreeTensorNetworkState(Float64, g, args...; kwargs...) +function (::Type{TTNT})(g::AbstractGraph, args...; kwargs...) where {TTNT<:TTNS} + return TTNT(Float64, g, args...; kwargs...) end -TreeTensorNetworkState(tn::ITensorNetwork, args...) = TreeTensorNetworkState{vertextype(tn)}(tn, args...) - -# can defer almost everything to ITensorNework constructor -function TreeTensorNetworkState( - ::Type{ElT}, graph::AbstractGraph, args...; kwargs... -) where {ElT<:Number} +function TreeTensorNetworkState(::Type{ElT}, graph::AbstractGraph, args...; kwargs...) where {ElT<:Number} itensor_network = ITensorNetwork(ElT, graph; kwargs...) return TreeTensorNetworkState(itensor_network, args...) end From 152db5368946d6edad5838f9b3de169611510c86 Mon Sep 17 00:00:00 2001 From: leburgel Date: Mon, 9 Jan 2023 16:55:45 +0100 Subject: [PATCH 09/13] Remove in-place operations. 
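
The pattern used throughout is the standard out-of-place one: take a copy of the
network up front, mutate only that copy, and return it, so callers never see their
input change. Roughly (a simplified sketch of the single-edge `orthogonalize`, not
the exact code):

    function orthogonalize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...)
      tn = copy(tn)  # work on a copy, the caller's network is left untouched
      left_inds = uniqueinds(tn, edge)
      X, Y = factorize(tn[src(edge)], left_inds; tags=tags(tn, edge), ortho="left", kwargs...)
      tn[src(edge)] = X  # mutation happens only on the local copy
      tn[dst(edge)] = tn[dst(edge)] * Y
      return tn
    end

Assuming `copy` of an `ITensorNetwork` is shallow with respect to the tensor data
(as for `ITensors.MPS`), this keeps the out-of-place versions cheap.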
--- src/ITensorNetworks.jl | 1 + src/abstractitensornetwork.jl | 66 +++++++++-------------- src/exports.jl | 1 + src/imports.jl | 2 - src/treetensornetwork/abstractprojttno.jl | 2 +- src/treetensornetwork/projttno_apply.jl | 55 +++++++++++++++++++ test/test_ttns.jl | 1 - 7 files changed, 83 insertions(+), 45 deletions(-) create mode 100644 src/treetensornetwork/projttno_apply.jl diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 96057b01..00e66a52 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -55,6 +55,7 @@ include(joinpath("treetensornetwork", "opsum_to_ttno.jl")) include(joinpath("treetensornetwork", "abstractprojttno.jl")) include(joinpath("treetensornetwork", "projttno.jl")) include(joinpath("treetensornetwork", "projttnosum.jl")) +include(joinpath("treetensornetwork", "projttno_apply.jl")) include("utility.jl") include("specialitensornetworks.jl") include("renameitensornetwork.jl") diff --git a/src/abstractitensornetwork.jl b/src/abstractitensornetwork.jl index 4b494abc..ecd2e633 100644 --- a/src/abstractitensornetwork.jl +++ b/src/abstractitensornetwork.jl @@ -342,11 +342,11 @@ function tags(tn::AbstractITensorNetwork, edge) return commontags(is) end -function svd!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return svd!(tn, edgetype(tn)(edge)) +function svd(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return svd(tn, edgetype(tn)(edge)) end -function svd!( +function svd( tn::AbstractITensorNetwork, edge::AbstractEdge; U_vertex=src(edge), @@ -356,10 +356,11 @@ function svd!( v_tags=tags(tn, edge), kwargs..., ) + tn = copy(tn) left_inds = uniqueinds(tn, edge) U, S, V = svd(tn[src(edge)], left_inds; lefttags=u_tags, right_tags=v_tags, kwargs...) - rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? + rem_vertex!(tn, src(edge)) add_vertex!(tn, U_vertex) tn[U_vertex] = U @@ -372,11 +373,7 @@ function svd!( return tn end -function svd(tn::AbstractITensorNetwork, edge; kwargs...) - return svd!(copy(tn), edge; kwargs...) -end - -function qr!( +function qr( tn::AbstractITensorNetwork, edge::AbstractEdge; Q_vertex=src(edge), @@ -384,10 +381,11 @@ function qr!( tags=tags(tn, edge), kwargs..., ) + tn = copy(tn) left_inds = uniqueinds(tn, edge) Q, R = factorize(tn[src(edge)], left_inds; tags, kwargs...) - rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? + rem_vertex!(tn, src(edge)) add_vertex!(tn, Q_vertex) tn[Q_vertex] = Q @@ -397,11 +395,7 @@ function qr!( return tn end -function qr(tn::AbstractITensorNetwork, edge; kwargs...) - return qr!(copy(tn), edge; kwargs...) -end - -function factorize!( +function factorize( tn::AbstractITensorNetwork, edge::AbstractEdge; X_vertex=src(edge), @@ -417,7 +411,7 @@ function factorize!( left_inds = uniqueinds(tn, edge) X, Y = factorize(tn[src(edge)], left_inds; tags, kwargs...) - rem_vertex!(tn, src(edge)) # TODO: avoid this if we can? + rem_vertex!(tn, src(edge)) add_vertex!(tn, X_vertex) add_vertex!(tn, Y_vertex) @@ -436,16 +430,13 @@ function factorize!( return tn end -function factorize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - return factorize!(copy(tn), edge; kwargs...) -end - # For ambiguity error; TODO: decide whether to use graph mutating methods when resulting graph is unchanged? -function _orthogonalize_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - # factorize!(tn, edge; kwargs...) +function _orthogonalize_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + # tn = factorize(tn, edge; kwargs...) 
# # TODO: Implement as `only(common_neighbors(tn, src(edge), dst(edge)))` # new_vertex = only(neighbors(tn, src(edge)) ∩ neighbors(tn, dst(edge))) # return contract(tn, new_vertex => dst(edge)) + tn = copy(tn) left_inds = uniqueinds(tn, edge) ltags = tags(tn, edge) X, Y = factorize(tn[src(edge)], left_inds; tags=ltags, ortho="left", kwargs...) @@ -454,31 +445,28 @@ function _orthogonalize_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kw return tn end -function orthogonalize!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - return _orthogonalize_edge!(tn, edge; kwargs...) +function orthogonalize(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + return _orthogonalize_edge(tn, edge; kwargs...) end -function orthogonalize!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return orthogonalize!(tn, edgetype(tn)(edge); kwargs...) +function orthogonalize(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return orthogonalize(tn, edgetype(tn)(edge); kwargs...) end # Orthogonalize an ITensorNetwork towards a source vertex, treating # the network as a tree spanned by a spanning tree. # TODO: Rename `tree_orthogonalize`. -function orthogonalize!(ψ::AbstractITensorNetwork, source_vertex) +function orthogonalize(ψ::AbstractITensorNetwork, source_vertex) spanning_tree_edges = post_order_dfs_edges(bfs_tree(ψ, source_vertex), source_vertex) for e in spanning_tree_edges - orthogonalize!(ψ, e) + ψ = orthogonalize(ψ, e) end return ψ end -function orthogonalize(tn::AbstractITensorNetwork, args...; kwargs...) - return orthogonalize!(copy(tn), args...; kwargs...) -end - # TODO: decide whether to use graph mutating methods when resulting graph is unchanged? -function _truncate_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) +function _truncate_edge(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + tn = copy(tn) left_inds = uniqueinds(tn, edge) ltags = tags(tn, edge) U, S, V = svd(tn[src(edge)], left_inds; lefttags=ltags, ortho="left", kwargs...) @@ -487,16 +475,12 @@ function _truncate_edge!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs. return tn end -function truncate!(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) - return _truncate_edge!(tn, edge; kwargs...) -end - -function truncate!(tn::AbstractITensorNetwork, edge::Pair; kwargs...) - return truncate!(tn, edgetype(tn)(edge); kwargs...) +function truncate(tn::AbstractITensorNetwork, edge::AbstractEdge; kwargs...) + return _truncate_edge(tn, edge; kwargs...) end -function truncate(tn::AbstractITensorNetwork, edge; kwargs...) - return truncate!(copy(tn), edge; kwargs...) +function truncate(tn::AbstractITensorNetwork, edge::Pair; kwargs...) + return truncate(tn, edgetype(tn)(edge); kwargs...) 
end function Base.:*(c::Number, ψ::AbstractITensorNetwork) diff --git a/src/exports.jl b/src/exports.jl index c427f75f..eaa8ac0f 100644 --- a/src/exports.jl +++ b/src/exports.jl @@ -82,6 +82,7 @@ export AbstractITensorNetwork, TTNO, ProjTTNO, ProjTTNOSum, + ProjTTNOApply, set_nsite, position, finite_state_machine, diff --git a/src/imports.jl b/src/imports.jl index c2b3ed8d..4645cbdb 100644 --- a/src/imports.jl +++ b/src/imports.jl @@ -34,7 +34,6 @@ import ITensors: # contraction contract, orthogonalize, - orthogonalize!, isortho, inner, loginner, @@ -42,7 +41,6 @@ import ITensors: lognorm, expect, # truncation - truncate!, truncate, replacebond!, replacebond, diff --git a/src/treetensornetwork/abstractprojttno.jl b/src/treetensornetwork/abstractprojttno.jl index 8ca288a4..bb5622a4 100644 --- a/src/treetensornetwork/abstractprojttno.jl +++ b/src/treetensornetwork/abstractprojttno.jl @@ -131,7 +131,7 @@ end function position( P::AbstractProjTTNO{V}, psi::TTNS{V}, pos::Union{Vector{<:V},NamedEdge{V}} ) where {V} - # shift position; TODO: update for immutable struct + # shift position P = shift_position(P, pos) # invalidate environments corresponding to internal edges for e in internal_edges(P) diff --git a/src/treetensornetwork/projttno_apply.jl b/src/treetensornetwork/projttno_apply.jl new file mode 100644 index 00000000..82d99a9d --- /dev/null +++ b/src/treetensornetwork/projttno_apply.jl @@ -0,0 +1,55 @@ +struct ProjTTNOApply{V} <: AbstractProjTTNO{V} + pos::Union{Vector{<:V},NamedEdge{V}} + psi0::TTNS{V} + H::TTNO{V} + environments::Dictionary{NamedEdge{V},ITensor} +end + +function ProjTTNOApply(psi0::TTNS, H::TTNO) + return ProjTTNOApply(vertextype(H)[], psi0, H, Dictionary{edgetype(H),ITensor}()) +end + +function copy(P::ProjTTNOApply) + return ProjTTNOApply(P.pos, copy(P.psi0), copy(P.H), copy(P.environments)) +end + +function set_nsite(P::ProjTTNOApply, nsite) + return P +end + +function shift_position(P::ProjTTNOApply, pos) + return ProjTTNOApply(pos, P.psi0, P.H, P.environments) +end + +function make_environment!(P::ProjTTNOApply{V}, psi::TTNS{V}, e::NamedEdge{V})::ITensor where {V} + # invalidate environment for opposite edge direction if necessary + reverse(e) ∈ incident_edges(P) || unset!(P.environments, reverse(e)) + # do nothing if valid environment already present + if haskey(P.environments, e) + env = environment(P, e) + else + if is_leaf(underlying_graph(P), src(e)) + # leaves are easy + env = P.psi0[src(e)] * P.H[src(e)] * dag(psi[src(e)]) + else + # construct by contracting neighbors + neighbor_envs = ITensor[] + for n in setdiff(neighbors(underlying_graph(P), src(e)), [dst(e)]) + push!(neighbor_envs, make_environment!(P, psi, edgetype(P)(n, src(e)))) + end + # manually heuristic for contraction order: two environments, site tensors, then + # other environments + frst, scnd, rst = _separate_first_two(neighbor_envs) + itensor_map = vcat(P.psi0[src(e)], frst, scnd, P.H[src(e)], dag(psi[src(e)]), rst) + # TODO: actually use optimal contraction sequence here + env = reduce(*, itensor_map) + end + # cache + set!(P.environments, e, env) + end + @assert( + hascommoninds(environment(P, e), psi[src(e)]), + "Something went wrong, probably re-orthogonalized this edge in the same direction twice!" 
+ ) + return env +end diff --git a/test/test_ttns.jl b/test/test_ttns.jl index c9929b06..25fe8b38 100644 --- a/test/test_ttns.jl +++ b/test/test_ttns.jl @@ -30,7 +30,6 @@ using Random # dense array constructor from Vector{Index} and NamedDimGraph @disable_warn_order s4 = TTNS(AS, sites_s, c; vertex_order, cutoff) # see if this actually worked - @show ortho_center(s1) root_vertex = only(ortho_center(s1)) @disable_warn_order begin S1 = contract(s1, root_vertex) From e9f0c7a090a41038db1178eb950dae70f2204f72 Mon Sep 17 00:00:00 2001 From: leburgel Date: Mon, 9 Jan 2023 19:23:19 +0100 Subject: [PATCH 10/13] Remove unnecessary comment. --- README.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/README.md b/README.md index 04582a0c..d822a6a4 100644 --- a/README.md +++ b/README.md @@ -2,9 +2,6 @@ A package to provide general network data structures and tools to use with ITensors.jl. -This particular branch contains a draft of the extensions needed to implement sweeping -algorithms for tree tensor networks. - ## Installation You can install this package through the Julia package manager: From 6cb3e36b6771f02df117608adb14f3c56650f507 Mon Sep 17 00:00:00 2001 From: mtfishman Date: Mon, 9 Jan 2023 16:34:45 -0500 Subject: [PATCH 11/13] Fix imports.jl --- src/imports.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/imports.jl b/src/imports.jl index 4152e963..441794ff 100644 --- a/src/imports.jl +++ b/src/imports.jl @@ -32,7 +32,7 @@ import Graphs: SimpleGraph, is_directed, weights import KrylovKit: eigsolve, linsolve -import LinearAlgebra: factorize, normalize, normalize!, qr, svd, +import LinearAlgebra: factorize, normalize, normalize!, qr, svd import ITensors: # contraction From 591cd3c5bb6a2cf2dac7a28b422c97e4ae147ae6 Mon Sep 17 00:00:00 2001 From: mtfishman Date: Mon, 9 Jan 2023 16:42:49 -0500 Subject: [PATCH 12/13] Update directory name from treetensornetwork to treetensornetworks --- src/ITensorNetworks.jl | 18 +++++++++--------- .../abstractprojttno.jl | 0 .../abstracttreetensornetwork.jl | 0 .../opsum_to_ttno.jl | 4 ++-- .../projttno.jl | 0 .../projttno_apply.jl | 0 .../projttnosum.jl | 0 .../ttno.jl | 0 .../ttns.jl | 0 9 files changed, 11 insertions(+), 11 deletions(-) rename src/{treetensornetwork => treetensornetworks}/abstractprojttno.jl (100%) rename src/{treetensornetwork => treetensornetworks}/abstracttreetensornetwork.jl (100%) rename src/{treetensornetwork => treetensornetworks}/opsum_to_ttno.jl (99%) rename src/{treetensornetwork => treetensornetworks}/projttno.jl (100%) rename src/{treetensornetwork => treetensornetworks}/projttno_apply.jl (100%) rename src/{treetensornetwork => treetensornetworks}/projttnosum.jl (100%) rename src/{treetensornetwork => treetensornetworks}/ttno.jl (100%) rename src/{treetensornetwork => treetensornetworks}/ttns.jl (100%) diff --git a/src/ITensorNetworks.jl b/src/ITensorNetworks.jl index 6e2fb3c0..28654953 100644 --- a/src/ITensorNetworks.jl +++ b/src/ITensorNetworks.jl @@ -73,20 +73,20 @@ include("expect.jl") include("models.jl") include("tebd.jl") include("itensornetwork.jl") -include(joinpath("treetensornetwork", "abstracttreetensornetwork.jl")) -include(joinpath("treetensornetwork", "ttns.jl")) -include(joinpath("treetensornetwork", "ttno.jl")) -include(joinpath("treetensornetwork", "opsum_to_ttno.jl")) -include(joinpath("treetensornetwork", "abstractprojttno.jl")) -include(joinpath("treetensornetwork", "projttno.jl")) -include(joinpath("treetensornetwork", "projttnosum.jl")) -include(joinpath("treetensornetwork", 
"projttno_apply.jl")) include("utility.jl") include("specialitensornetworks.jl") include("renameitensornetwork.jl") include("boundarymps.jl") include("beliefpropagation.jl") -include(joinpath("treetensornetworks", "treetensornetwork.jl")) +include(joinpath("treetensornetworks", "abstracttreetensornetwork.jl")) +# include(joinpath("treetensornetworks", "treetensornetwork.jl")) +include(joinpath("treetensornetworks", "ttns.jl")) +include(joinpath("treetensornetworks", "ttno.jl")) +include(joinpath("treetensornetworks", "opsum_to_ttno.jl")) +include(joinpath("treetensornetworks", "abstractprojttno.jl")) +include(joinpath("treetensornetworks", "projttno.jl")) +include(joinpath("treetensornetworks", "projttnosum.jl")) +include(joinpath("treetensornetworks", "projttno_apply.jl")) # Compatibility of ITensor observer and Observers # TODO: Delete this include(joinpath("treetensornetworks", "solvers", "update_observer.jl")) diff --git a/src/treetensornetwork/abstractprojttno.jl b/src/treetensornetworks/abstractprojttno.jl similarity index 100% rename from src/treetensornetwork/abstractprojttno.jl rename to src/treetensornetworks/abstractprojttno.jl diff --git a/src/treetensornetwork/abstracttreetensornetwork.jl b/src/treetensornetworks/abstracttreetensornetwork.jl similarity index 100% rename from src/treetensornetwork/abstracttreetensornetwork.jl rename to src/treetensornetworks/abstracttreetensornetwork.jl diff --git a/src/treetensornetwork/opsum_to_ttno.jl b/src/treetensornetworks/opsum_to_ttno.jl similarity index 99% rename from src/treetensornetwork/opsum_to_ttno.jl rename to src/treetensornetworks/opsum_to_ttno.jl index f7366a0a..0d4bc9ad 100644 --- a/src/treetensornetwork/opsum_to_ttno.jl +++ b/src/treetensornetworks/opsum_to_ttno.jl @@ -567,11 +567,11 @@ function TTNO(o::Scaled{C,Op}, s::IndsNetwork; kwargs...) where {C} return TTNO(OpSum{C}() + o, s; kwargs...) end -function TTNO(o::Sum{Op}, s::IndsNetwork; kwargs...) where {C} +function TTNO(o::Sum{Op}, s::IndsNetwork; kwargs...) return TTNO(OpSum{Float64}() + o, s; kwargs...) end -function TTNO(o::Prod{Op}, s::IndsNetwork; kwargs...) where {C} +function TTNO(o::Prod{Op}, s::IndsNetwork; kwargs...) return TTNO(OpSum{Float64}() + o, s; kwargs...) 
end diff --git a/src/treetensornetwork/projttno.jl b/src/treetensornetworks/projttno.jl similarity index 100% rename from src/treetensornetwork/projttno.jl rename to src/treetensornetworks/projttno.jl diff --git a/src/treetensornetwork/projttno_apply.jl b/src/treetensornetworks/projttno_apply.jl similarity index 100% rename from src/treetensornetwork/projttno_apply.jl rename to src/treetensornetworks/projttno_apply.jl diff --git a/src/treetensornetwork/projttnosum.jl b/src/treetensornetworks/projttnosum.jl similarity index 100% rename from src/treetensornetwork/projttnosum.jl rename to src/treetensornetworks/projttnosum.jl diff --git a/src/treetensornetwork/ttno.jl b/src/treetensornetworks/ttno.jl similarity index 100% rename from src/treetensornetwork/ttno.jl rename to src/treetensornetworks/ttno.jl diff --git a/src/treetensornetwork/ttns.jl b/src/treetensornetworks/ttns.jl similarity index 100% rename from src/treetensornetwork/ttns.jl rename to src/treetensornetworks/ttns.jl From 105cec6ae2b65fc17919a724a9193cec53c4fe8d Mon Sep 17 00:00:00 2001 From: mtfishman Date: Mon, 9 Jan 2023 16:52:50 -0500 Subject: [PATCH 13/13] Fix some warnings --- README.md | 2 ++ src/indsnetwork.jl | 2 +- src/requires/omeinsumcontractionorders.jl | 2 +- test/test_indsnetwork.jl | 2 +- test/test_opsum_to_ttno.jl | 5 +++-- test/test_sitetype.jl | 1 + 6 files changed, 9 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 42a40237..638fd2ef 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ A package to provide general network data structures and tools to use with ITensors.jl. + + ## Installation You can install this package through the Julia package manager: diff --git a/src/indsnetwork.jl b/src/indsnetwork.jl index 2341a4ce..251e3c23 100644 --- a/src/indsnetwork.jl +++ b/src/indsnetwork.jl @@ -19,7 +19,7 @@ is_directed(::Type{<:IndsNetwork}) = false # When setting an edge with collections of `Index`, set the reverse direction # edge with the `dag`. function reverse_data_direction( - inds_network::IndsNetwork, is::Union{Index,Tuple{Vararg{<:Index}},Vector{<:Index}} + inds_network::IndsNetwork, is::Union{Index,Tuple{Vararg{Index}},Vector{<:Index}} ) return dag(is) end diff --git a/src/requires/omeinsumcontractionorders.jl b/src/requires/omeinsumcontractionorders.jl index f9a96af1..c62fdabc 100644 --- a/src/requires/omeinsumcontractionorders.jl +++ b/src/requires/omeinsumcontractionorders.jl @@ -1,7 +1,7 @@ # OMEinsumContractionOrders wrapper for ITensors # Slicing is not supported, because it might require extra work to slice an `ITensor` correctly. 
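# (Explanatory aside on the change just below, based on Julia 1.8 behavior: `Tuple{Vararg{<:ITensor}}`
# wraps `Vararg` in a `UnionAll`, which triggers the "Wrapping `Vararg` directly in UnionAll is
# deprecated" warning. Since `ITensor` is a concrete type and `Tuple` is covariant, dropping the `<:`
# accepts exactly the same arguments, e.g. `(A, B) isa Tuple{Vararg{ITensor}}` still holds for
# ITensors `A` and `B`, while avoiding the warning.)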
-const ITensorList = Union{Vector{<:ITensor},Tuple{Vararg{<:ITensor}}} +const ITensorList = Union{Vector{ITensor},Tuple{Vararg{ITensor}}} # TODO: Replace with `inds(A::ITensor)` or `collect(inds(A::ITensor))` getid(index::Index) = index diff --git a/test/test_indsnetwork.jl b/test/test_indsnetwork.jl index e5567f60..1a9bc27f 100644 --- a/test/test_indsnetwork.jl +++ b/test/test_indsnetwork.jl @@ -1,6 +1,6 @@ +using Dictionaries using ITensors using ITensorNetworks -using Dictionaries using Random using Test diff --git a/test/test_opsum_to_ttno.jl b/test/test_opsum_to_ttno.jl index b340270d..34ffa488 100644 --- a/test/test_opsum_to_ttno.jl +++ b/test/test_opsum_to_ttno.jl @@ -1,7 +1,8 @@ -using Test -using ITensorNetworks +using Dictionaries using ITensors +using ITensorNetworks using Random +using Test @testset "OpSum to TTNO" begin # small comb tree diff --git a/test/test_sitetype.jl b/test/test_sitetype.jl index 6f90205b..a3515565 100644 --- a/test/test_sitetype.jl +++ b/test/test_sitetype.jl @@ -1,3 +1,4 @@ +using Dictionaries using ITensors using ITensorNetworks using Random