diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index ce77aac3..33ec93d7 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -52,67 +52,3 @@ steps: queue: central slurm_ntasks: 1 - - label: "Gaussian Process Emulator" - key: "gaussian_process_emulator" - command: | - export PYTHON="$$JULIA_DEPOT_PATH/conda/3/x86_64/bin/python" - export PYTHONHOME="$$JULIA_DEPOT_PATH/conda/3/x86_64/bin" - export CONDA_JL_HOME="$$JULIA_DEPOT_PATH/conda/3/x86_64/" - - mkdir examples/Emulator/GaussianProcess/depot - export JULIA_DEPOT_PATH="$$(pwd)/examples/Emulator/GaussianProcess/depot:$JULIA_DEPOT_PATH" - - julia --color=yes --project=examples/Emulator/GaussianProcess -e ' - println("--- Developing Project") - using Pkg; - Pkg.develop(path=".") - Pkg.update() - println("--- Instantiating Project") - Pkg.instantiate() - println("+++ Running PlotGP") - include("examples/Emulator/GaussianProcess/plot_GP.jl") - println("+++ Running Learn Noise") - include("examples/Emulator/GaussianProcess/learn_noise.jl")' - artifact_paths: - - "examples/Emulator/GaussianProcess/output/*.png" - env: - PYTHON: "$$JULIA_DEPOT_PATH/conda/3/bin/python" - PYTHONHOME: "$$JULIA_DEPOT_PATH/conda/3/bin" - CONDA_JL_HOME: "$$JULIA_DEPOT_PATH/conda/3" - agents: - config: cpu - queue: central - slurm_ntasks: 1 - - - label: "Random Feature Emulator" - key: "random_feature_emulator" - command: | - export PYTHON="$$JULIA_DEPOT_PATH/conda/3/x86_64/bin/python" - export PYTHONHOME="$$JULIA_DEPOT_PATH/conda/3/x86_64/bin" - export CONDA_JL_HOME="$$JULIA_DEPOT_PATH/conda/3/x86_64/" - - mkdir examples/Emulator/RandomFeature/depot - export JULIA_DEPOT_PATH="$$(pwd)/examples/Emulator/RandomFeature/depot:$JULIA_DEPOT_PATH" - - julia --color=yes --project=examples/Emulator/RandomFeature -e ' - println("--- Developing Project") - using Pkg; - Pkg.develop(path=".") - Pkg.update() - println("--- Instantiating Project") - Pkg.instantiate() - println("+++ Running Scalar Random Feature cases") - include("examples/Emulator/RandomFeature/scalar_optimize_and_plot_RF.jl") - println("+++ Running Vector Random Feature cases") - include("examples/Emulator/RandomFeature/vector_optimize_and_plot_RF.jl")' - artifact_paths: - - "examples/Emulator/RandomFeature/output/*.png" - env: - PYTHON: "$$JULIA_DEPOT_PATH/conda/3/bin/python" - PYTHONHOME: "$$JULIA_DEPOT_PATH/conda/3/bin" - CONDA_JL_HOME: "$$JULIA_DEPOT_PATH/conda/3" - agents: - config: cpu - queue: central - slurm_ntasks: 1 - diff --git a/docs/Manifest.toml b/docs/Manifest.toml deleted file mode 100644 index 2591ab90..00000000 --- a/docs/Manifest.toml +++ /dev/null @@ -1,1116 +0,0 @@ -# This file is machine-generated - editing it directly is not advised - -julia_version = "1.8.5" -manifest_format = "2.0" -project_hash = "e4d965798d55f5903483b71f545533e62ea32fdf" - -[[deps.AMD]] -deps = ["Libdl", "LinearAlgebra", "SparseArrays", "Test"] -git-tree-sha1 = "00163dc02b882ca5ec032400b919e5f5011dbd31" -uuid = "14f7f29c-3bd6-536c-9a0b-7339e30b5a3e" -version = "0.5.0" - -[[deps.ANSIColoredPrinters]] -git-tree-sha1 = "574baf8110975760d391c710b6341da1afa48d8c" -uuid = "a4c015fc-c6ff-483c-b24f-f7ea428134e9" -version = "0.0.1" - -[[deps.AbstractFFTs]] -deps = ["ChainRulesCore", "LinearAlgebra"] -git-tree-sha1 = "16b6dbc4cf7caee4e1e75c49485ec67b667098a0" -uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" -version = "1.3.1" - -[[deps.AbstractMCMC]] -deps = ["BangBang", "ConsoleProgressMonitor", "Distributed", "LogDensityProblems", "Logging", "LoggingExtras", "ProgressLogging", "Random", "StatsBase", 
"TerminalLoggers", "Transducers"] -git-tree-sha1 = "323799cab36200a01f5e9da3fecbd58329e2dd27" -uuid = "80f14c24-f653-4e6a-9b94-39d6b0f70001" -version = "4.4.0" - -[[deps.AbstractTrees]] -git-tree-sha1 = "faa260e4cb5aba097a73fab382dd4b5819d8ec8c" -uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" -version = "0.4.4" - -[[deps.Adapt]] -deps = ["LinearAlgebra", "Requires"] -git-tree-sha1 = "cc37d689f599e8df4f464b2fa3870ff7db7492ef" -uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "3.6.1" - -[[deps.AdvancedMH]] -deps = ["AbstractMCMC", "Distributions", "LogDensityProblems", "Random", "Requires"] -git-tree-sha1 = "165af834eee68d0a96c58daa950ddf0b3f45f608" -uuid = "5b7e9947-ddc0-4b3f-9b55-0d8042f74170" -version = "0.7.4" - -[[deps.ArgCheck]] -git-tree-sha1 = "a3a402a35a2f7e0b87828ccabbd5ebfbebe356b4" -uuid = "dce04be8-c92d-5529-be00-80e4d2c0e197" -version = "2.3.0" - -[[deps.ArgTools]] -uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" -version = "1.1.1" - -[[deps.ArrayInterface]] -deps = ["Adapt", "LinearAlgebra", "Requires", "SnoopPrecompile", "SparseArrays", "SuiteSparse"] -git-tree-sha1 = "38911c7737e123b28182d89027f4216cfc8a9da7" -uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9" -version = "7.4.3" - -[[deps.ArrayInterfaceCore]] -deps = ["LinearAlgebra", "SnoopPrecompile", "SparseArrays", "SuiteSparse"] -git-tree-sha1 = "e5f08b5689b1aad068e01751889f2f615c7db36d" -uuid = "30b0a656-2188-435a-8636-2ec0e6a096e2" -version = "0.1.29" - -[[deps.Artifacts]] -uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" - -[[deps.AxisAlgorithms]] -deps = ["LinearAlgebra", "Random", "SparseArrays", "WoodburyMatrices"] -git-tree-sha1 = "66771c8d21c8ff5e3a93379480a2307ac36863f7" -uuid = "13072b0f-2c55-5437-9ae7-d433b7a33950" -version = "1.0.1" - -[[deps.AxisArrays]] -deps = ["Dates", "IntervalSets", "IterTools", "RangeArrays"] -git-tree-sha1 = "1dd4d9f5beebac0c03446918741b1a03dc5e5788" -uuid = "39de3d68-74b9-583c-8d2d-e117c070f3a9" -version = "0.4.6" - -[[deps.BangBang]] -deps = ["Compat", "ConstructionBase", "Future", "InitialValues", "LinearAlgebra", "Requires", "Setfield", "Tables", "ZygoteRules"] -git-tree-sha1 = "7fe6d92c4f281cf4ca6f2fba0ce7b299742da7ca" -uuid = "198e06fe-97b7-11e9-32a5-e1d131e6ad66" -version = "0.3.37" - -[[deps.Base64]] -uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" - -[[deps.Baselet]] -git-tree-sha1 = "aebf55e6d7795e02ca500a689d326ac979aaf89e" -uuid = "9718e550-a3fa-408a-8086-8db961cd8217" -version = "0.1.1" - -[[deps.BenchmarkTools]] -deps = ["JSON", "Logging", "Printf", "Profile", "Statistics", "UUIDs"] -git-tree-sha1 = "d9a9701b899b30332bbcb3e1679c41cce81fb0e8" -uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" -version = "1.3.2" - -[[deps.BitTwiddlingConvenienceFunctions]] -deps = ["Static"] -git-tree-sha1 = "0c5f81f47bbbcf4aea7b2959135713459170798b" -uuid = "62783981-4cbd-42fc-bca8-16325de8dc4b" -version = "0.1.5" - -[[deps.Bzip2_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "19a35467a82e236ff51bc17a3a44b69ef35185a2" -uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0" -version = "1.0.8+0" - -[[deps.CPUSummary]] -deps = ["CpuId", "IfElse", "Static"] -git-tree-sha1 = "2c144ddb46b552f72d7eafe7cc2f50746e41ea21" -uuid = "2a0fbf3d-bb9c-48f3-b0a9-814d99fd7ab9" -version = "0.2.2" - -[[deps.CalibrateEmulateSample]] -deps = ["AbstractMCMC", "AdvancedMH", "Conda", "Distributions", "DocStringExtensions", "EnsembleKalmanProcesses", "GaussianProcesses", "LinearAlgebra", "MCMCChains", "Pkg", "Printf", "ProgressBars", "PyCall", "Random", "RandomFeatures", "ScikitLearn", "StableRNGs", 
"Statistics", "StatsBase"] -path = ".." -uuid = "95e48a1f-0bec-4818-9538-3db4340308e3" -version = "0.2.0" - -[[deps.ChainRulesCore]] -deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "c6d890a52d2c4d55d326439580c3b8d0875a77d9" -uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.15.7" - -[[deps.ChangesOfVariables]] -deps = ["LinearAlgebra", "Test"] -git-tree-sha1 = "f84967c4497e0e1955f9a582c232b02847c5f589" -uuid = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0" -version = "0.1.7" - -[[deps.CloseOpenIntervals]] -deps = ["Static", "StaticArrayInterface"] -git-tree-sha1 = "70232f82ffaab9dc52585e0dd043b5e0c6b714f1" -uuid = "fb6a15b2-703c-40df-9091-08a04967cfa9" -version = "0.1.12" - -[[deps.CodecBzip2]] -deps = ["Bzip2_jll", "Libdl", "TranscodingStreams"] -git-tree-sha1 = "2e62a725210ce3c3c2e1a3080190e7ca491f18d7" -uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd" -version = "0.7.2" - -[[deps.CodecZlib]] -deps = ["TranscodingStreams", "Zlib_jll"] -git-tree-sha1 = "9c209fb7536406834aa938fb149964b985de6c83" -uuid = "944b1d66-785c-5afd-91f1-9de20f533193" -version = "0.7.1" - -[[deps.CommonSubexpressions]] -deps = ["MacroTools", "Test"] -git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7" -uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" -version = "0.3.0" - -[[deps.Compat]] -deps = ["Dates", "LinearAlgebra", "UUIDs"] -git-tree-sha1 = "7a60c856b9fa189eb34f5f8a6f6b5529b7942957" -uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "4.6.1" - -[[deps.CompilerSupportLibraries_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" -version = "1.0.1+0" - -[[deps.CompositionsBase]] -git-tree-sha1 = "455419f7e328a1a2493cabc6428d79e951349769" -uuid = "a33af91c-f02d-484b-be07-31d278c5ca2b" -version = "0.1.1" - -[[deps.Conda]] -deps = ["Downloads", "JSON", "VersionParsing"] -git-tree-sha1 = "e32a90da027ca45d84678b826fffd3110bb3fc90" -uuid = "8f4d0f93-b110-5947-807f-2305c1781a2d" -version = "1.8.0" - -[[deps.ConsoleProgressMonitor]] -deps = ["Logging", "ProgressMeter"] -git-tree-sha1 = "3ab7b2136722890b9af903859afcf457fa3059e8" -uuid = "88cd18e8-d9cc-4ea6-8889-5259c0d15c8b" -version = "0.1.2" - -[[deps.ConstructionBase]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "89a9db8d28102b094992472d333674bd1a83ce2a" -uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9" -version = "1.5.1" - -[[deps.Convex]] -deps = ["AbstractTrees", "BenchmarkTools", "LDLFactorizations", "LinearAlgebra", "MathOptInterface", "OrderedCollections", "SparseArrays", "Test"] -git-tree-sha1 = "af4188609c0620ed4b0e4493ed416d3c8b2dadeb" -uuid = "f65535da-76fb-5f13-bab9-19810c17039a" -version = "0.15.3" - -[[deps.CpuId]] -deps = ["Markdown"] -git-tree-sha1 = "fcbb72b032692610bfbdb15018ac16a36cf2e406" -uuid = "adafc99b-e345-5852-983c-f28acb93d879" -version = "0.3.1" - -[[deps.Crayons]] -git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15" -uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f" -version = "4.1.1" - -[[deps.DataAPI]] -git-tree-sha1 = "e8119c1a33d267e16108be441a287a6981ba1630" -uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" -version = "1.14.0" - -[[deps.DataFrames]] -deps = ["Compat", "DataAPI", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrettyTables", "Printf", "REPL", "Random", "Reexport", "SentinelArrays", "SnoopPrecompile", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"] -git-tree-sha1 = "aa51303df86f8626a962fccb878430cdb0a97eee" -uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0" 
-version = "1.5.0" - -[[deps.DataStructures]] -deps = ["Compat", "InteractiveUtils", "OrderedCollections"] -git-tree-sha1 = "d1fff3a548102f48987a52a2e0d114fa97d730f0" -uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.18.13" - -[[deps.DataValueInterfaces]] -git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6" -uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464" -version = "1.0.0" - -[[deps.Dates]] -deps = ["Printf"] -uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" - -[[deps.DefineSingletons]] -git-tree-sha1 = "0fba8b706d0178b4dc7fd44a96a92382c9065c2c" -uuid = "244e2a9f-e319-4986-a169-4d1fe445cd52" -version = "0.1.2" - -[[deps.DensityInterface]] -deps = ["InverseFunctions", "Test"] -git-tree-sha1 = "80c3e8639e3353e5d2912fb3a1916b8455e2494b" -uuid = "b429d917-457f-4dbc-8f4c-0cc954292b1d" -version = "0.4.0" - -[[deps.DiffResults]] -deps = ["StaticArraysCore"] -git-tree-sha1 = "782dd5f4561f5d267313f23853baaaa4c52ea621" -uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" -version = "1.1.0" - -[[deps.DiffRules]] -deps = ["IrrationalConstants", "LogExpFunctions", "NaNMath", "Random", "SpecialFunctions"] -git-tree-sha1 = "a4ad7ef19d2cdc2eff57abbbe68032b1cd0bd8f8" -uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "1.13.0" - -[[deps.Distances]] -deps = ["LinearAlgebra", "SparseArrays", "Statistics", "StatsAPI"] -git-tree-sha1 = "49eba9ad9f7ead780bfb7ee319f962c811c6d3b2" -uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" -version = "0.10.8" - -[[deps.Distributed]] -deps = ["Random", "Serialization", "Sockets"] -uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" - -[[deps.Distributions]] -deps = ["ChainRulesCore", "DensityInterface", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SparseArrays", "SpecialFunctions", "Statistics", "StatsBase", "StatsFuns", "Test"] -git-tree-sha1 = "180538ef4e3aa02b01413055a7a9e8b6047663e1" -uuid = "31c24e10-a181-5473-b8eb-7969acd0382f" -version = "0.25.88" - -[[deps.DocStringExtensions]] -deps = ["LibGit2"] -git-tree-sha1 = "2fb1e02f2b635d0845df5d7c167fec4dd739b00d" -uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.9.3" - -[[deps.Documenter]] -deps = ["ANSIColoredPrinters", "Base64", "Dates", "DocStringExtensions", "IOCapture", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] -git-tree-sha1 = "58fea7c536acd71f3eef6be3b21c0df5f3df88fd" -uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" -version = "0.27.24" - -[[deps.Downloads]] -deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"] -uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" -version = "1.6.0" - -[[deps.ElasticArrays]] -deps = ["Adapt"] -git-tree-sha1 = "e1c40d78de68e9a2be565f0202693a158ec9ad85" -uuid = "fdbdab4c-e67f-52f5-8c3f-e7b388dad3d4" -version = "1.2.11" - -[[deps.ElasticPDMats]] -deps = ["LinearAlgebra", "MacroTools", "PDMats"] -git-tree-sha1 = "5157c93fe9431a041e4cd84265dfce3d53a52323" -uuid = "2904ab23-551e-5aed-883f-487f97af5226" -version = "0.2.2" - -[[deps.EnsembleKalmanProcesses]] -deps = ["Convex", "Distributions", "DocStringExtensions", "LinearAlgebra", "MathOptInterface", "Optim", "QuadGK", "Random", "RecipesBase", "SCS", "SparseArrays", "Statistics", "StatsBase", "TOML"] -git-tree-sha1 = "67916022c1660cced17323d550074aa25d4c4a98" -uuid = "aa8a2aa5-91d8-4396-bcef-d4f2ec43552d" -version = "1.0.0" - -[[deps.FFTW]] -deps = ["AbstractFFTs", "FFTW_jll", "LinearAlgebra", "MKL_jll", "Preferences", "Reexport"] -git-tree-sha1 = "f9818144ce7c8c41edf5c4c179c684d92aa4d9fe" -uuid = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" -version = 
"1.6.0" - -[[deps.FFTW_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "c6033cc3892d0ef5bb9cd29b7f2f0331ea5184ea" -uuid = "f5851436-0d7a-5f13-b9de-f02708fd171a" -version = "3.3.10+0" - -[[deps.FastGaussQuadrature]] -deps = ["LinearAlgebra", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "58d83dd5a78a36205bdfddb82b1bb67682e64487" -uuid = "442a2c76-b920-505d-bb47-c5924d526838" -version = "0.4.9" - -[[deps.FileWatching]] -uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee" - -[[deps.FillArrays]] -deps = ["LinearAlgebra", "Random", "SparseArrays", "Statistics"] -git-tree-sha1 = "fc86b4fd3eff76c3ce4f5e96e2fdfa6282722885" -uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" -version = "1.0.0" - -[[deps.FiniteDiff]] -deps = ["ArrayInterface", "LinearAlgebra", "Requires", "Setfield", "SparseArrays", "StaticArrays"] -git-tree-sha1 = "03fcb1c42ec905d15b305359603888ec3e65f886" -uuid = "6a86dc24-6348-571c-b903-95158fe2bd41" -version = "2.19.0" - -[[deps.Formatting]] -deps = ["Printf"] -git-tree-sha1 = "8339d61043228fdd3eb658d86c926cb282ae72a8" -uuid = "59287772-0a20-5a39-b81b-1366585eb4c0" -version = "0.4.2" - -[[deps.ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "00e252f4d706b3d55a8863432e742bf5717b498d" -uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.35" - -[[deps.Future]] -deps = ["Random"] -uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820" - -[[deps.GaussianProcesses]] -deps = ["Distances", "Distributions", "ElasticArrays", "ElasticPDMats", "FastGaussQuadrature", "ForwardDiff", "LinearAlgebra", "Optim", "PDMats", "Printf", "ProgressMeter", "Random", "RecipesBase", "ScikitLearnBase", "SpecialFunctions", "StaticArrays", "Statistics", "StatsFuns"] -git-tree-sha1 = "31749ff6868caf6dd50902eec652a724071dbed3" -uuid = "891a1506-143c-57d2-908e-e1f8e92e6de9" -version = "0.12.5" - -[[deps.HostCPUFeatures]] -deps = ["BitTwiddlingConvenienceFunctions", "IfElse", "Libdl", "Static"] -git-tree-sha1 = "734fd90dd2f920a2f1921d5388dcebe805b262dc" -uuid = "3e5b6fbb-0976-4d2c-9146-d79de83f2fb0" -version = "0.1.14" - -[[deps.IOCapture]] -deps = ["Logging", "Random"] -git-tree-sha1 = "f7be53659ab06ddc986428d3a9dcc95f6fa6705a" -uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89" -version = "0.2.2" - -[[deps.IfElse]] -git-tree-sha1 = "debdd00ffef04665ccbb3e150747a77560e8fad1" -uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173" -version = "0.1.1" - -[[deps.InitialValues]] -git-tree-sha1 = "4da0f88e9a39111c2fa3add390ab15f3a44f3ca3" -uuid = "22cec73e-a1b8-11e9-2c92-598750a2cf9c" -version = "0.3.1" - -[[deps.InlineStrings]] -deps = ["Parsers"] -git-tree-sha1 = "9cc2baf75c6d09f9da536ddf58eb2f29dedaf461" -uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48" -version = "1.4.0" - -[[deps.IntelOpenMP_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "0cb9352ef2e01574eeebdb102948a58740dcaf83" -uuid = "1d5cc7b8-4909-519e-a0f8-d0f5ad9712d0" -version = "2023.1.0+0" - -[[deps.InteractiveUtils]] -deps = ["Markdown"] -uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" - -[[deps.Interpolations]] -deps = ["Adapt", "AxisAlgorithms", "ChainRulesCore", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "Requires", "SharedArrays", "SparseArrays", "StaticArrays", "WoodburyMatrices"] -git-tree-sha1 = "721ec2cf720536ad005cb38f50dbba7b02419a15" -uuid = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59" -version = "0.14.7" - -[[deps.IntervalSets]] -deps = 
["Dates", "Random", "Statistics"] -git-tree-sha1 = "16c0cc91853084cb5f58a78bd209513900206ce6" -uuid = "8197267c-284f-5f27-9208-e0e47529a953" -version = "0.7.4" - -[[deps.InverseFunctions]] -deps = ["Test"] -git-tree-sha1 = "6667aadd1cdee2c6cd068128b3d226ebc4fb0c67" -uuid = "3587e190-3f89-42d0-90ee-14403ec27112" -version = "0.1.9" - -[[deps.InvertedIndices]] -git-tree-sha1 = "0dc7b50b8d436461be01300fd8cd45aa0274b038" -uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f" -version = "1.3.0" - -[[deps.IrrationalConstants]] -git-tree-sha1 = "7fd44fd4ff43fc60815f8e764c0f352b83c49151" -uuid = "92d709cd-6900-40b7-9082-c6be49f344b6" -version = "0.1.1" - -[[deps.IterTools]] -git-tree-sha1 = "fa6287a4469f5e048d763df38279ee729fbd44e5" -uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e" -version = "1.4.0" - -[[deps.IteratorInterfaceExtensions]] -git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856" -uuid = "82899510-4779-5014-852e-03e436cf321d" -version = "1.0.0" - -[[deps.JLLWrappers]] -deps = ["Preferences"] -git-tree-sha1 = "abc9885a7ca2052a736a600f7fa66209f96506e1" -uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" -version = "1.4.1" - -[[deps.JSON]] -deps = ["Dates", "Mmap", "Parsers", "Unicode"] -git-tree-sha1 = "31e996f0a15c7b280ba9f76636b3ff9e2ae58c9a" -uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "0.21.4" - -[[deps.KernelDensity]] -deps = ["Distributions", "DocStringExtensions", "FFTW", "Interpolations", "StatsBase"] -git-tree-sha1 = "4a9513ad756e712177bd342ba6c022b515ed8d76" -uuid = "5ab0869b-81aa-558d-bb23-cbf5423bbe9b" -version = "0.6.6" - -[[deps.LDLFactorizations]] -deps = ["AMD", "LinearAlgebra", "SparseArrays", "Test"] -git-tree-sha1 = "cbf4b646f82bfc58bb48bcca9dcce2eb88da4cd1" -uuid = "40e66cde-538c-5869-a4ad-c39174c6795b" -version = "0.10.0" - -[[deps.LaTeXStrings]] -git-tree-sha1 = "f2355693d6778a178ade15952b7ac47a4ff97996" -uuid = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f" -version = "1.3.0" - -[[deps.LayoutPointers]] -deps = ["ArrayInterface", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface"] -git-tree-sha1 = "88b8f66b604da079a627b6fb2860d3704a6729a1" -uuid = "10f19ff3-798f-405d-979b-55457f8fc047" -version = "0.1.14" - -[[deps.LazyArtifacts]] -deps = ["Artifacts", "Pkg"] -uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3" - -[[deps.LeftChildRightSiblingTrees]] -deps = ["AbstractTrees"] -git-tree-sha1 = "fb6803dafae4a5d62ea5cab204b1e657d9737e7f" -uuid = "1d6d02ad-be62-4b6b-8a6d-2f90e265016e" -version = "0.2.0" - -[[deps.LibCURL]] -deps = ["LibCURL_jll", "MozillaCACerts_jll"] -uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" -version = "0.6.3" - -[[deps.LibCURL_jll]] -deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] -uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" -version = "7.84.0+0" - -[[deps.LibGit2]] -deps = ["Base64", "NetworkOptions", "Printf", "SHA"] -uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" - -[[deps.LibSSH2_jll]] -deps = ["Artifacts", "Libdl", "MbedTLS_jll"] -uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" -version = "1.10.2+0" - -[[deps.Libdl]] -uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" - -[[deps.LineSearches]] -deps = ["LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "Printf"] -git-tree-sha1 = "7bbea35cec17305fc70a0e5b4641477dc0789d9d" -uuid = "d3d80556-e9d4-5f37-9878-2ab0fcc64255" -version = "7.2.0" - -[[deps.LinearAlgebra]] -deps = ["Libdl", "libblastrampoline_jll"] -uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" - -[[deps.LogDensityProblems]] -deps = ["ArgCheck", "DocStringExtensions", "Random"] 
-git-tree-sha1 = "f9a11237204bc137617194d79d813069838fcf61" -uuid = "6fdf6af0-433a-55f7-b3ed-c6c6e0b8df7c" -version = "2.1.1" - -[[deps.LogExpFunctions]] -deps = ["ChainRulesCore", "ChangesOfVariables", "DocStringExtensions", "InverseFunctions", "IrrationalConstants", "LinearAlgebra"] -git-tree-sha1 = "0a1b7c2863e44523180fdb3146534e265a91870b" -uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -version = "0.3.23" - -[[deps.Logging]] -uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" - -[[deps.LoggingExtras]] -deps = ["Dates", "Logging"] -git-tree-sha1 = "cedb76b37bc5a6c702ade66be44f831fa23c681e" -uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36" -version = "1.0.0" - -[[deps.LoopVectorization]] -deps = ["ArrayInterface", "ArrayInterfaceCore", "CPUSummary", "ChainRulesCore", "CloseOpenIntervals", "DocStringExtensions", "ForwardDiff", "HostCPUFeatures", "IfElse", "LayoutPointers", "LinearAlgebra", "OffsetArrays", "PolyesterWeave", "PrecompileTools", "SIMDTypes", "SLEEFPirates", "SpecialFunctions", "Static", "StaticArrayInterface", "ThreadingUtilities", "UnPack", "VectorizationBase"] -git-tree-sha1 = "e7ce3cdc520da8135e73d7cb303e0617a19f582b" -uuid = "bdcacae8-1622-11e9-2a5c-532679323890" -version = "0.12.158" - -[[deps.MCMCChains]] -deps = ["AbstractMCMC", "AxisArrays", "Dates", "Distributions", "Formatting", "IteratorInterfaceExtensions", "KernelDensity", "LinearAlgebra", "MCMCDiagnosticTools", "MLJModelInterface", "NaturalSort", "OrderedCollections", "PrettyTables", "Random", "RecipesBase", "Serialization", "Statistics", "StatsBase", "StatsFuns", "TableTraits", "Tables"] -git-tree-sha1 = "c659f7508035a7bdd5102aef2de028ab035f289a" -uuid = "c7f686f2-ff18-58e9-bc7b-31028e88f75d" -version = "5.7.1" - -[[deps.MCMCDiagnosticTools]] -deps = ["AbstractFFTs", "DataAPI", "DataStructures", "Distributions", "LinearAlgebra", "MLJModelInterface", "Random", "SpecialFunctions", "Statistics", "StatsBase", "Tables"] -git-tree-sha1 = "d1737c39191aa26f42a64e320de313f1d1fd74b1" -uuid = "be115224-59cd-429b-ad48-344e309966f0" -version = "0.2.1" - -[[deps.MKL_jll]] -deps = ["Artifacts", "IntelOpenMP_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "Pkg"] -git-tree-sha1 = "2ce8695e1e699b68702c03402672a69f54b8aca9" -uuid = "856f044c-d86e-5d09-b602-aeab76dc8ba7" -version = "2022.2.0+0" - -[[deps.MLJModelInterface]] -deps = ["Random", "ScientificTypesBase", "StatisticalTraits"] -git-tree-sha1 = "c8b7e632d6754a5e36c0d94a4b466a5ba3a30128" -uuid = "e80e1ace-859a-464e-9ed9-23947d8ae3ea" -version = "1.8.0" - -[[deps.MacroTools]] -deps = ["Markdown", "Random"] -git-tree-sha1 = "42324d08725e200c23d4dfb549e0d5d89dede2d2" -uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.10" - -[[deps.ManualMemory]] -git-tree-sha1 = "bcaef4fc7a0cfe2cba636d84cda54b5e4e4ca3cd" -uuid = "d125e4d3-2237-4719-b19c-fa641b8a4667" -version = "0.1.8" - -[[deps.Markdown]] -deps = ["Base64"] -uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" - -[[deps.MathOptInterface]] -deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "DataStructures", "ForwardDiff", "JSON", "LinearAlgebra", "MutableArithmetics", "NaNMath", "OrderedCollections", "PrecompileTools", "Printf", "SparseArrays", "SpecialFunctions", "Test", "Unicode"] -git-tree-sha1 = "8e054675d393ce5866dcdd6a071075e25e21a39c" -uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" -version = "1.15.1" - -[[deps.MbedTLS_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" -version = "2.28.0+0" - -[[deps.MicroCollections]] -deps = ["BangBang", "InitialValues", "Setfield"] -git-tree-sha1 = 
"629afd7d10dbc6935ec59b32daeb33bc4460a42e" -uuid = "128add7d-3638-4c79-886c-908ea0c25c34" -version = "0.1.4" - -[[deps.Missings]] -deps = ["DataAPI"] -git-tree-sha1 = "f66bdc5de519e8f8ae43bdc598782d35a25b1272" -uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" -version = "1.1.0" - -[[deps.Mmap]] -uuid = "a63ad114-7e13-5084-954f-fe012c677804" - -[[deps.MozillaCACerts_jll]] -uuid = "14a3606d-f60d-562e-9121-12d972cd8159" -version = "2022.2.1" - -[[deps.MutableArithmetics]] -deps = ["LinearAlgebra", "SparseArrays", "Test"] -git-tree-sha1 = "3295d296288ab1a0a2528feb424b854418acff57" -uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0" -version = "1.2.3" - -[[deps.NLSolversBase]] -deps = ["DiffResults", "Distributed", "FiniteDiff", "ForwardDiff"] -git-tree-sha1 = "a0b464d183da839699f4c79e7606d9d186ec172c" -uuid = "d41bc354-129a-5804-8e4c-c37616107c6c" -version = "7.8.3" - -[[deps.NaNMath]] -deps = ["OpenLibm_jll"] -git-tree-sha1 = "0877504529a3e5c3343c6f8b4c0381e57e4387e4" -uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" -version = "1.0.2" - -[[deps.NaturalSort]] -git-tree-sha1 = "eda490d06b9f7c00752ee81cfa451efe55521e21" -uuid = "c020b1a1-e9b0-503a-9c33-f039bfc54a85" -version = "1.0.0" - -[[deps.NetworkOptions]] -uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" -version = "1.2.0" - -[[deps.OffsetArrays]] -deps = ["Adapt"] -git-tree-sha1 = "82d7c9e310fe55aa54996e6f7f94674e2a38fcb4" -uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881" -version = "1.12.9" - -[[deps.OpenBLAS32_jll]] -deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "9c6c2ed4b7acd2137b878eb96c68e63b76199d0f" -uuid = "656ef2d0-ae68-5445-9ca0-591084a874a2" -version = "0.3.17+0" - -[[deps.OpenBLAS_jll]] -deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"] -uuid = "4536629a-c528-5b80-bd46-f80d51c5b363" -version = "0.3.20+0" - -[[deps.OpenLibm_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "05823500-19ac-5b8b-9628-191a04bc5112" -version = "0.8.1+0" - -[[deps.OpenSpecFun_jll]] -deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" -uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" -version = "0.5.5+0" - -[[deps.Optim]] -deps = ["Compat", "FillArrays", "ForwardDiff", "LineSearches", "LinearAlgebra", "NLSolversBase", "NaNMath", "Parameters", "PositiveFactorizations", "Printf", "SparseArrays", "StatsBase"] -git-tree-sha1 = "a89b11f0f354f06099e4001c151dffad7ebab015" -uuid = "429524aa-4258-5aef-a3af-852621145aeb" -version = "1.7.5" - -[[deps.OrderedCollections]] -git-tree-sha1 = "d321bf2de576bf25ec4d3e4360faca399afca282" -uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" -version = "1.6.0" - -[[deps.PDMats]] -deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse", "Test"] -git-tree-sha1 = "95a4038d1011dfdbde7cecd2ad0ac411e53ab1bc" -uuid = "90014a1f-27ba-587c-ab20-58faa44d9150" -version = "0.10.1" - -[[deps.Parameters]] -deps = ["OrderedCollections", "UnPack"] -git-tree-sha1 = "34c0e9ad262e5f7fc75b10a9952ca7692cfc5fbe" -uuid = "d96e819e-fc66-5662-9728-84c9c7592b0a" -version = "0.12.3" - -[[deps.Parsers]] -deps = ["Dates", "SnoopPrecompile"] -git-tree-sha1 = "478ac6c952fddd4399e71d4779797c538d0ff2bf" -uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "2.5.8" - -[[deps.Pkg]] -deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] -uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" -version = "1.8.0" - 
-[[deps.PolyesterWeave]] -deps = ["BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "Static", "ThreadingUtilities"] -git-tree-sha1 = "240d7170f5ffdb285f9427b92333c3463bf65bf6" -uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad" -version = "0.2.1" - -[[deps.PooledArrays]] -deps = ["DataAPI", "Future"] -git-tree-sha1 = "a6062fe4063cdafe78f4a0a81cfffb89721b30e7" -uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720" -version = "1.4.2" - -[[deps.PositiveFactorizations]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "17275485f373e6673f7e7f97051f703ed5b15b20" -uuid = "85a6dd25-e78a-55b7-8502-1745935b8125" -version = "0.2.4" - -[[deps.PrecompileTools]] -deps = ["Preferences"] -git-tree-sha1 = "2e47054ffe7d0a8872e977c0d09eb4b3d162ebde" -uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a" -version = "1.0.2" - -[[deps.Preferences]] -deps = ["TOML"] -git-tree-sha1 = "47e5f437cc0e7ef2ce8406ce1e7e24d44915f88d" -uuid = "21216c6a-2e73-6563-6e65-726566657250" -version = "1.3.0" - -[[deps.PrettyTables]] -deps = ["Crayons", "Formatting", "LaTeXStrings", "Markdown", "Reexport", "StringManipulation", "Tables"] -git-tree-sha1 = "548793c7859e28ef026dba514752275ee871169f" -uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d" -version = "2.2.3" - -[[deps.Printf]] -deps = ["Unicode"] -uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" - -[[deps.Profile]] -deps = ["Printf"] -uuid = "9abbd945-dff8-562f-b5e8-e1ebf5ef1b79" - -[[deps.ProgressBars]] -deps = ["Printf"] -git-tree-sha1 = "9d84c8646109eb8bc7a006d59b157c64d5155c81" -uuid = "49802e3a-d2f1-5c88-81d8-b72133a6f568" -version = "1.5.0" - -[[deps.ProgressLogging]] -deps = ["Logging", "SHA", "UUIDs"] -git-tree-sha1 = "80d919dee55b9c50e8d9e2da5eeafff3fe58b539" -uuid = "33c8b6b6-d38a-422a-b730-caa89a2f386c" -version = "0.1.4" - -[[deps.ProgressMeter]] -deps = ["Distributed", "Printf"] -git-tree-sha1 = "d7a7aef8f8f2d537104f170139553b14dfe39fe9" -uuid = "92933f4c-e287-5a05-a399-4b506db050ca" -version = "1.7.2" - -[[deps.PyCall]] -deps = ["Conda", "Dates", "Libdl", "LinearAlgebra", "MacroTools", "Serialization", "VersionParsing"] -git-tree-sha1 = "62f417f6ad727987c755549e9cd88c46578da562" -uuid = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" -version = "1.95.1" - -[[deps.QuadGK]] -deps = ["DataStructures", "LinearAlgebra"] -git-tree-sha1 = "6ec7ac8412e83d57e313393220879ede1740f9ee" -uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc" -version = "2.8.2" - -[[deps.REPL]] -deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] -uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" - -[[deps.Random]] -deps = ["SHA", "Serialization"] -uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" - -[[deps.RandomFeatures]] -deps = ["Distributions", "DocStringExtensions", "EnsembleKalmanProcesses", "LinearAlgebra", "LoopVectorization", "Random", "SpecialFunctions", "Statistics", "StatsBase", "Tullio"] -git-tree-sha1 = "415d1bade5527b9716f4cc8ffc24935700b838a0" -uuid = "36c3bae2-c0c3-419d-b3b4-eebadd35c5e5" -version = "0.3.1" - -[[deps.RangeArrays]] -git-tree-sha1 = "b9039e93773ddcfc828f12aadf7115b4b4d225f5" -uuid = "b3c3ace0-ae52-54e7-9d0b-2c1406fd6b9d" -version = "0.3.2" - -[[deps.Ratios]] -deps = ["Requires"] -git-tree-sha1 = "6d7bb727e76147ba18eed998700998e17b8e4911" -uuid = "c84ed2f1-dad5-54f0-aa8e-dbefe2724439" -version = "0.4.4" - -[[deps.RecipesBase]] -deps = ["PrecompileTools"] -git-tree-sha1 = "5c3d09cc4f31f5fc6af001c250bf1278733100ff" -uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01" -version = "1.3.4" - -[[deps.Reexport]] -git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b" -uuid = 
"189a3867-3050-52da-a836-e630ba90ab69" -version = "1.2.2" - -[[deps.Requires]] -deps = ["UUIDs"] -git-tree-sha1 = "838a3a4188e2ded87a4f9f184b4b0d78a1e91cb7" -uuid = "ae029012-a4dd-5104-9daa-d747884805df" -version = "1.3.0" - -[[deps.Rmath]] -deps = ["Random", "Rmath_jll"] -git-tree-sha1 = "f65dcb5fa46aee0cf9ed6274ccbd597adc49aa7b" -uuid = "79098fc4-a85e-5d69-aa6a-4863f24498fa" -version = "0.7.1" - -[[deps.Rmath_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "6ed52fdd3382cf21947b15e8870ac0ddbff736da" -uuid = "f50d1b31-88e8-58de-be2c-1cc44531875f" -version = "0.4.0+0" - -[[deps.SCS]] -deps = ["MathOptInterface", "Requires", "SCS_GPU_jll", "SCS_MKL_jll", "SCS_jll", "SparseArrays"] -git-tree-sha1 = "05c1ed62a8d78827d0dd1a9fa04040a4a254bf08" -uuid = "c946c3f1-0d1f-5ce8-9dea-7daa1f7e2d13" -version = "1.1.4" - -[[deps.SCS_GPU_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "OpenBLAS32_jll"] -git-tree-sha1 = "6a61274837cfa050bd996910d347e876bef3a6b3" -uuid = "af6e375f-46ec-5fa0-b791-491b0dfa44a4" -version = "3.2.3+1" - -[[deps.SCS_MKL_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "MKL_jll"] -git-tree-sha1 = "15b887b4b1f747f98b22fba2225fe7cd26861cea" -uuid = "3f2553a9-4106-52be-b7dd-865123654657" -version = "3.2.3+0" - -[[deps.SCS_jll]] -deps = ["Artifacts", "JLLWrappers", "Libdl", "OpenBLAS32_jll"] -git-tree-sha1 = "e4902566d6207206c27fe6f45e8c2d28c34889df" -uuid = "f4f2fc5b-1d94-523c-97ea-2ab488bedf4b" -version = "3.2.3+0" - -[[deps.SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" -version = "0.7.0" - -[[deps.SIMDTypes]] -git-tree-sha1 = "330289636fb8107c5f32088d2741e9fd7a061a5c" -uuid = "94e857df-77ce-4151-89e5-788b33177be4" -version = "0.1.0" - -[[deps.SLEEFPirates]] -deps = ["IfElse", "Static", "VectorizationBase"] -git-tree-sha1 = "cda0aece8080e992f6370491b08ef3909d1c04e7" -uuid = "476501e8-09a2-5ece-8869-fb82de89a1fa" -version = "0.6.38" - -[[deps.ScientificTypesBase]] -git-tree-sha1 = "a8e18eb383b5ecf1b5e6fc237eb39255044fd92b" -uuid = "30f210dd-8aff-4c5f-94ba-8e64358c1161" -version = "3.0.0" - -[[deps.ScikitLearn]] -deps = ["Compat", "Conda", "DataFrames", "Distributed", "IterTools", "LinearAlgebra", "MacroTools", "Parameters", "Printf", "PyCall", "Random", "ScikitLearnBase", "SparseArrays", "StatsBase", "VersionParsing"] -git-tree-sha1 = "3df098033358431591827bb86cada0bed744105a" -uuid = "3646fa90-6ef7-5e7e-9f22-8aca16db6324" -version = "0.7.0" - -[[deps.ScikitLearnBase]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "7877e55c1523a4b336b433da39c8e8c08d2f221f" -uuid = "6e75b9c4-186b-50bd-896f-2d2496a4843e" -version = "0.5.0" - -[[deps.SentinelArrays]] -deps = ["Dates", "Random"] -git-tree-sha1 = "77d3c4726515dca71f6d80fbb5e251088defe305" -uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c" -version = "1.3.18" - -[[deps.Serialization]] -uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" - -[[deps.Setfield]] -deps = ["ConstructionBase", "Future", "MacroTools", "StaticArraysCore"] -git-tree-sha1 = "e2cc6d8c88613c05e1defb55170bf5ff211fbeac" -uuid = "efcf1570-3423-57d1-acb7-fd33fddbac46" -version = "1.1.1" - -[[deps.SharedArrays]] -deps = ["Distributed", "Mmap", "Random", "Serialization"] -uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" - -[[deps.SnoopPrecompile]] -deps = ["Preferences"] -git-tree-sha1 = "e760a70afdcd461cf01a575947738d359234665c" -uuid = "66db9d55-30c0-4569-8b51-7e840670fc0c" -version = "1.0.3" - -[[deps.Sockets]] -uuid = "6462fe0b-24de-5631-8697-dd941f90decc" - -[[deps.SortingAlgorithms]] -deps = ["DataStructures"] 
-git-tree-sha1 = "a4ada03f999bd01b3a25dcaa30b2d929fe537e00" -uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c" -version = "1.1.0" - -[[deps.SparseArrays]] -deps = ["LinearAlgebra", "Random"] -uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" - -[[deps.SpecialFunctions]] -deps = ["ChainRulesCore", "IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"] -git-tree-sha1 = "ef28127915f4229c971eb43f3fc075dd3fe91880" -uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "2.2.0" - -[[deps.SplittablesBase]] -deps = ["Setfield", "Test"] -git-tree-sha1 = "e08a62abc517eb79667d0a29dc08a3b589516bb5" -uuid = "171d559e-b47b-412a-8079-5efa626c420e" -version = "0.1.15" - -[[deps.StableRNGs]] -deps = ["Random", "Test"] -git-tree-sha1 = "3be7d49667040add7ee151fefaf1f8c04c8c8276" -uuid = "860ef19b-820b-49d6-a774-d7a799459cd3" -version = "1.0.0" - -[[deps.Static]] -deps = ["IfElse"] -git-tree-sha1 = "08be5ee09a7632c32695d954a602df96a877bf0d" -uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3" -version = "0.8.6" - -[[deps.StaticArrayInterface]] -deps = ["ArrayInterface", "Compat", "IfElse", "LinearAlgebra", "Requires", "SnoopPrecompile", "SparseArrays", "Static", "SuiteSparse"] -git-tree-sha1 = "33040351d2403b84afce74dae2e22d3f5b18edcb" -uuid = "0d7ed370-da01-4f52-bd93-41d350b8b718" -version = "1.4.0" - -[[deps.StaticArrays]] -deps = ["LinearAlgebra", "Random", "StaticArraysCore", "Statistics"] -git-tree-sha1 = "c262c8e978048c2b095be1672c9bee55b4619521" -uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.5.24" - -[[deps.StaticArraysCore]] -git-tree-sha1 = "6b7ba252635a5eff6a0b0664a41ee140a1c9e72a" -uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c" -version = "1.4.0" - -[[deps.StatisticalTraits]] -deps = ["ScientificTypesBase"] -git-tree-sha1 = "30b9236691858e13f167ce829490a68e1a597782" -uuid = "64bff920-2084-43da-a3e6-9bb72801c0c9" -version = "3.2.0" - -[[deps.Statistics]] -deps = ["LinearAlgebra", "SparseArrays"] -uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[[deps.StatsAPI]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "45a7769a04a3cf80da1c1c7c60caf932e6f4c9f7" -uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" -version = "1.6.0" - -[[deps.StatsBase]] -deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] -git-tree-sha1 = "d1bf48bfcc554a3761a133fe3a9bb01488e06916" -uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.33.21" - -[[deps.StatsFuns]] -deps = ["ChainRulesCore", "InverseFunctions", "IrrationalConstants", "LogExpFunctions", "Reexport", "Rmath", "SpecialFunctions"] -git-tree-sha1 = "5950925ff997ed6fb3e985dcce8eb1ba42a0bbe7" -uuid = "4c63d2b9-4356-54db-8cca-17b64c39e42c" -version = "0.9.18" - -[[deps.StringManipulation]] -git-tree-sha1 = "46da2434b41f41ac3594ee9816ce5541c6096123" -uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e" -version = "0.3.0" - -[[deps.SuiteSparse]] -deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"] -uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9" - -[[deps.TOML]] -deps = ["Dates"] -uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" -version = "1.0.0" - -[[deps.TableTraits]] -deps = ["IteratorInterfaceExtensions"] -git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39" -uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c" -version = "1.0.1" - -[[deps.Tables]] -deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"] -git-tree-sha1 = 
"1544b926975372da01227b382066ab70e574a3ec" -uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c" -version = "1.10.1" - -[[deps.Tar]] -deps = ["ArgTools", "SHA"] -uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" -version = "1.10.1" - -[[deps.TerminalLoggers]] -deps = ["LeftChildRightSiblingTrees", "Logging", "Markdown", "Printf", "ProgressLogging", "UUIDs"] -git-tree-sha1 = "f133fab380933d042f6796eda4e130272ba520ca" -uuid = "5d786b92-1e48-4d6f-9151-6b4477ca9bed" -version = "0.1.7" - -[[deps.Test]] -deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] -uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - -[[deps.ThreadingUtilities]] -deps = ["ManualMemory"] -git-tree-sha1 = "c97f60dd4f2331e1a495527f80d242501d2f9865" -uuid = "8290d209-cae3-49c0-8002-c8c24d57dab5" -version = "0.5.1" - -[[deps.TranscodingStreams]] -deps = ["Random", "Test"] -git-tree-sha1 = "9a6ae7ed916312b41236fcef7e0af564ef934769" -uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.9.13" - -[[deps.Transducers]] -deps = ["Adapt", "ArgCheck", "BangBang", "Baselet", "CompositionsBase", "DefineSingletons", "Distributed", "InitialValues", "Logging", "Markdown", "MicroCollections", "Requires", "Setfield", "SplittablesBase", "Tables"] -git-tree-sha1 = "c42fa452a60f022e9e087823b47e5a5f8adc53d5" -uuid = "28d57a85-8fef-5791-bfe6-a80928e7c999" -version = "0.4.75" - -[[deps.Tullio]] -deps = ["ChainRulesCore", "DiffRules", "LinearAlgebra", "Requires"] -git-tree-sha1 = "7871a39eac745697ee512a87eeff06a048a7905b" -uuid = "bc48ee85-29a4-5162-ae0b-a64e1601d4bc" -version = "0.3.5" - -[[deps.UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" - -[[deps.UnPack]] -git-tree-sha1 = "387c1f73762231e86e0c9c5443ce3b4a0a9a0c2b" -uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed" -version = "1.0.2" - -[[deps.Unicode]] -uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" - -[[deps.VectorizationBase]] -deps = ["ArrayInterface", "CPUSummary", "HostCPUFeatures", "IfElse", "LayoutPointers", "Libdl", "LinearAlgebra", "SIMDTypes", "Static", "StaticArrayInterface"] -git-tree-sha1 = "b182207d4af54ac64cbc71797765068fdeff475d" -uuid = "3d5dd08c-fd9d-11e8-17fa-ed2836048c2f" -version = "0.21.64" - -[[deps.VersionParsing]] -git-tree-sha1 = "58d6e80b4ee071f5efd07fda82cb9fbe17200868" -uuid = "81def892-9a0e-5fdd-b105-ffc91e053289" -version = "1.3.0" - -[[deps.WoodburyMatrices]] -deps = ["LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "de67fa59e33ad156a590055375a30b23c40299d3" -uuid = "efce3f68-66dc-5838-9240-27a6d6f5f9b6" -version = "0.5.5" - -[[deps.Zlib_jll]] -deps = ["Libdl"] -uuid = "83775a58-1f1d-513f-b197-d71354ab007a" -version = "1.2.12+3" - -[[deps.ZygoteRules]] -deps = ["ChainRulesCore", "MacroTools"] -git-tree-sha1 = "977aed5d006b840e2e40c0b48984f7463109046d" -uuid = "700de1a5-db45-46bc-99cf-38207098b444" -version = "0.2.3" - -[[deps.libblastrampoline_jll]] -deps = ["Artifacts", "Libdl", "OpenBLAS_jll"] -uuid = "8e850b90-86db-534c-a0d3-1478176c7d93" -version = "5.1.1+0" - -[[deps.nghttp2_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" -version = "1.48.0+0" - -[[deps.p7zip_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" -version = "17.4.0+0" diff --git a/docs/make.jl b/docs/make.jl index deef9fec..4b3260e0 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -2,11 +2,18 @@ using CalibrateEmulateSample using Documenter -# using DocumenterCitations #---------- -examples = ["Lorenz example" => "examples/lorenz_example.md", "Turbulence example" => 
"examples/edmf_example.md"] +examples = [ + "Emulator testing" => [ + "examples/emulators/regression_2d_2d.md", + "examples/emulators/lorenz_integrator_3d_3d.md", + "examples/emulators/ishigami_3d_1d.md", + ], + "Lorenz example" => "examples/lorenz_example.md", + "Turbulence example" => "examples/edmf_example.md", +] design = ["AbstractMCMC sampling API" => "API/AbstractMCMC.md"] @@ -47,7 +54,6 @@ makedocs( pages = pages, modules = [CalibrateEmulateSample], doctest = false, - strict = true, clean = true, checkdocs = :none, ) diff --git a/docs/src/assets/GP_l63_attr.png b/docs/src/assets/GP_l63_attr.png new file mode 100644 index 00000000..eea033d2 Binary files /dev/null and b/docs/src/assets/GP_l63_attr.png differ diff --git a/docs/src/assets/GP_l63_cdfs.png b/docs/src/assets/GP_l63_cdfs.png new file mode 100644 index 00000000..b5f98a1e Binary files /dev/null and b/docs/src/assets/GP_l63_cdfs.png differ diff --git a/docs/src/assets/GP_l63_pdf.png b/docs/src/assets/GP_l63_pdf.png new file mode 100644 index 00000000..65427873 Binary files /dev/null and b/docs/src/assets/GP_l63_pdf.png differ diff --git a/docs/src/assets/GP_l63_test.png b/docs/src/assets/GP_l63_test.png new file mode 100644 index 00000000..254d559f Binary files /dev/null and b/docs/src/assets/GP_l63_test.png differ diff --git a/docs/src/assets/RF-nosvd-nonsep_l63_attr.png b/docs/src/assets/RF-nosvd-nonsep_l63_attr.png new file mode 100644 index 00000000..628489cf Binary files /dev/null and b/docs/src/assets/RF-nosvd-nonsep_l63_attr.png differ diff --git a/docs/src/assets/RF-nosvd-nonsep_l63_cdfs.png b/docs/src/assets/RF-nosvd-nonsep_l63_cdfs.png new file mode 100644 index 00000000..e890d2fc Binary files /dev/null and b/docs/src/assets/RF-nosvd-nonsep_l63_cdfs.png differ diff --git a/docs/src/assets/RF-nosvd-nonsep_l63_pdf.png b/docs/src/assets/RF-nosvd-nonsep_l63_pdf.png new file mode 100644 index 00000000..d42dc47e Binary files /dev/null and b/docs/src/assets/RF-nosvd-nonsep_l63_pdf.png differ diff --git a/docs/src/assets/RF-nosvd-nonsep_l63_test.png b/docs/src/assets/RF-nosvd-nonsep_l63_test.png new file mode 100644 index 00000000..7a4ffc0f Binary files /dev/null and b/docs/src/assets/RF-nosvd-nonsep_l63_test.png differ diff --git a/docs/src/assets/ishigami_slices_GP.png b/docs/src/assets/ishigami_slices_GP.png new file mode 100644 index 00000000..a1b7e98e Binary files /dev/null and b/docs/src/assets/ishigami_slices_GP.png differ diff --git a/docs/src/assets/ishigami_slices_RF-scalar.png b/docs/src/assets/ishigami_slices_RF-scalar.png new file mode 100644 index 00000000..0089b54e Binary files /dev/null and b/docs/src/assets/ishigami_slices_RF-scalar.png differ diff --git a/docs/src/assets/regression2d2d-gp-skljl_y1_predictions.png b/docs/src/assets/regression2d2d-gp-skljl_y1_predictions.png new file mode 100644 index 00000000..13d4806d Binary files /dev/null and b/docs/src/assets/regression2d2d-gp-skljl_y1_predictions.png differ diff --git a/docs/src/assets/regression2d2d-gp-skljl_y2_predictions.png b/docs/src/assets/regression2d2d-gp-skljl_y2_predictions.png new file mode 100644 index 00000000..bbf97093 Binary files /dev/null and b/docs/src/assets/regression2d2d-gp-skljl_y2_predictions.png differ diff --git a/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y1_predictions.png b/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y1_predictions.png new file mode 100644 index 00000000..81fc575f Binary files /dev/null and b/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y1_predictions.png differ diff --git 
a/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y2_predictions.png b/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y2_predictions.png new file mode 100644 index 00000000..869185b2 Binary files /dev/null and b/docs/src/assets/regression2d2d-rf-nosvd-nonsep_y2_predictions.png differ diff --git a/docs/src/assets/regression2d2d-rf-scalar_y1_predictions.png b/docs/src/assets/regression2d2d-rf-scalar_y1_predictions.png new file mode 100644 index 00000000..1ab34f55 Binary files /dev/null and b/docs/src/assets/regression2d2d-rf-scalar_y1_predictions.png differ diff --git a/docs/src/assets/regression2d2d-rf-scalar_y2_predictions.png b/docs/src/assets/regression2d2d-rf-scalar_y2_predictions.png new file mode 100644 index 00000000..93c14010 Binary files /dev/null and b/docs/src/assets/regression2d2d-rf-scalar_y2_predictions.png differ diff --git a/docs/src/examples/emulators/ishigami_3d_1d.md b/docs/src/examples/emulators/ishigami_3d_1d.md new file mode 100644 index 00000000..ea604eea --- /dev/null +++ b/docs/src/examples/emulators/ishigami_3d_1d.md @@ -0,0 +1,174 @@
+# Global Sensitivity Analysis for an emulated Ishigami function
+
+In this example, we directly assess the performance of our machine learning emulators. The task is to learn a function for use in a [global sensitivity analysis](https://en.wikipedia.org/wiki/Variance-based_sensitivity_analysis). In particular, we learn the Ishigami function
+```math
+f(x; a, b) = (1 + bx_3^4)\sin(x_1) + a \sin^2(x_2), \qquad \forall x\in [-\pi,\pi]^3
+```
+with ``a=7, b=0.1``. In this example, global sensitivity analysis refers to the calculation of two Sobol indices. The first index collects the proportions ``V_i`` (a.k.a. `firstorder`) of the variance of ``f`` attributable to the input ``x_i``, and the second collects the proportions ``TV_i`` (a.k.a. `totalorder`) of the residual variance having removed contributions attributable to inputs ``x_j``, ``\forall j\neq i``. The Ishigami function has an analytic formula for these Sobol indices; it is also known that one can obtain numerical approximations through quasi-Monte Carlo methods by evaluating the Ishigami function on a special quasi-random Sobol sequence.
+
+To emulate the Ishigami function, the data consists of 300 pairs ``\{x,f(x)+\eta\}``, where ``\eta \sim N(0,\Sigma)`` is additive noise and the ``x`` are sampled from the Sobol sequence. The emulators are validated by evaluating the posterior mean function on the full 16000 points of the Sobol sequence, from which the Sobol indices are estimated. We rerun the experiment for many repeats of the random feature hyperparameter optimization and present the statistics of these indices, as well as plotting a realization of the emulator.
+
+We use the package [GlobalSensitivityAnalysis.jl](https://github.com/lrennels/GlobalSensitivityAnalysis.jl) for many of the GSA tools.
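+
+For reference, the analytic indices referred to above have a closed form (a standard result for the Ishigami function, consistent with the "True Sobol Indices" rows in the tables below). Writing ``D`` for the total variance of ``f`` over ``[-\pi,\pi]^3`` and ``D_i`` for the variance contribution of input ``x_i``,
+```math
+D = \frac{1}{2} + \frac{a^2}{8} + \frac{b\pi^4}{5} + \frac{b^2\pi^8}{18}, \qquad
+D_1 = \frac{1}{2}\left(1 + \frac{b\pi^4}{5}\right)^2, \qquad
+D_2 = \frac{a^2}{8}, \qquad D_3 = 0,
+```
+with the only nonzero interaction contribution being ``D_{13} = \frac{8b^2\pi^8}{225}``. The indices are then ``V_i = D_i/D``, and, for example, ``TV_1 = (D_1 + D_{13})/D``, ``TV_2 = D_2/D``, and ``TV_3 = D_{13}/D``.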
+
+### Walkthrough of the code
+We first load some standard packages
+```julia
+using Distributions
+using DataStructures # for `OrderedDict`
+using Random
+using LinearAlgebra
+using CairoMakie, ColorSchemes
+```
+and the packages providing the Ishigami function, the Sobol sequence, and the evaluation of the indices
+```julia
+using GlobalSensitivityAnalysis # for `SobolData`
+const GSA = GlobalSensitivityAnalysis
+```
+then the CES packages for the emulators
+```julia
+using CalibrateEmulateSample.Emulators # for `SKLJL`, `GaussianProcess`, `SeparableKernel`, `LowRankFactor`, `OneDimFactor`, `ScalarRandomFeatureInterface`, `Emulator`
+using CalibrateEmulateSample.DataContainers # for `PairedDataContainer`
+using CalibrateEmulateSample.EnsembleKalmanProcesses # for `DataMisfitController`
+```
+We set up the sampling procedure, evaluate the true Ishigami function on these points, and calculate the Sobol indices
+```julia
+n_data_gen = 2000
+
+data = SobolData(
+    params = OrderedDict(:x1 => Uniform(-π, π), :x2 => Uniform(-π, π), :x3 => Uniform(-π, π)),
+    N = n_data_gen,
+)
+
+# To perform the global analysis,
+# one must generate samples using the Sobol sequence (i.e. this creates more than N points)
+samples = GSA.sample(data)
+n_data = size(samples, 1) # [n_samples x 3]
+# run model (example)
+y = GSA.ishigami(samples)
+# perform Sobol Analysis
+result = analyze(data, y)
+```
+Next we create the noisy training data from the sequence samples
+```julia
+rng = MersenneTwister(2024) # fixed-seed RNG, defined here for completeness (the seed is illustrative)
+n_train_pts = 300
+ind = shuffle!(rng, Vector(1:n_data))[1:n_train_pts]
+# now subsample the samples data
+n_tp = length(ind)
+input = zeros(3, n_tp)
+output = zeros(1, n_tp)
+Γ = 1e-2 # noise variance
+noise = rand(rng, Normal(0, sqrt(Γ)), n_tp) # `Normal` takes a standard deviation
+for i in 1:n_tp
+    input[:, i] = samples[ind[i], :]
+    output[i] = y[ind[i]] + noise[i]
+end
+iopairs = PairedDataContainer(input, output)
+```
+We have a few cases for the user to investigate
+```julia
+cases = [
+    "Prior", # Scalar random feature emulator with no hyperparameter learning
+    "GP", # Trained Sci-kit learn Gaussian process emulator
+    "RF-scalar", # Trained scalar random feature emulator
+]
+```
+Each case sets up a different machine learning configuration in the `Emulator` object.
+
+For the random feature case, `RF-scalar`, we use a rank-3 kernel in the input space and 500 features for prediction, though for efficiency we use only 200 features when learning the hyperparameters. The relevant code snippets are
+```julia
+nugget = Float64(1e-12)
+overrides = Dict(
+    "scheduler" => DataMisfitController(terminate_at = 1e4),
+    "n_features_opt" => 200,
+)
+
+kernel_structure = SeparableKernel(LowRankFactor(3, nugget), OneDimFactor())
+n_features = 500
+mlt = ScalarRandomFeatureInterface(
+    n_features,
+    3,
+    rng = rng,
+    kernel_structure = kernel_structure,
+    optimizer_options = overrides,
+)
+```
+For the Gaussian process case, `GP`, we use the scikit-learn package with a default squared-exponential kernel, with a lengthscale learned in each input dimension. We do not learn an additional white kernel for the noise.
+```julia
+gppackage = Emulators.SKLJL()
+pred_type = Emulators.YType()
+mlt = GaussianProcess(
+    gppackage;
+    prediction_type = pred_type,
+    noise_learn = false,
+)
+```
+We finish by building the emulator object
+```julia
+# (`decorrelate` is a case-dependent flag set earlier in the example script)
+emulator = Emulator(mlt, iopairs; obs_noise_cov = Γ * I, decorrelate = decorrelate)
+optimize_hyperparameters!(emulator) # (only needed for some Emulator packages)
+```
+
+### Results and plots
+
+We validate the emulator by evaluating it on the entire Sobol sequence and calculating the Sobol indices (presenting the mean and standard deviation when using multiple repeats).
+
+```julia
+# predict on all Sobol points with emulator (example)
+y_pred, y_var = predict(emulator, samples', transform_to_real = true)
+
+# obtain emulated Sobol indices
+result_pred = analyze(data, y_pred')
+```
+
+## Gaussian Process Emulator (sci-kit learn `GP`)
+
+Here is the plot for one emulation
+```@raw html
+<img src="../../assets/ishigami_slices_GP.png" width="600">
+```
+and the output table of Sobol indices
+```
+True Sobol Indices
+******************
+ firstorder: [0.31390519114781146, 0.44241114479004084, 0.0]
+ totalorder: [0.5575888552099592, 0.44241114479004084, 0.24368366406214775]
+
+Sampled truth Sobol Indices (# points 16000)
+***************************
+ firstorder: [0.31261591941512257, 0.441887746224135, -0.005810964687365922]
+ totalorder: [0.5623611180844434, 0.44201284296404386, 0.24465876318633062]
+
+Sampled Emulated Sobol Indices (# obs 300, noise var 0.01)
+***************************************************************
+ firstorder: [0.3094638183079643, 0.4518400892052567, -0.007351344957260407]
+ totalorder: [0.5502469909342245, 0.4587734278791574, 0.23542404141319245]
+```
+
+## Random feature emulator (Separable Low-rank kernel `RF-scalar`)
+
+Here is the plot for one emulation
+```@raw html
+<img src="../../assets/ishigami_slices_RF-scalar.png" width="600">
+```
+
+Table for 20 repeats of the algorithm
+```
+True Sobol Indices
+******************
+ firstorder: [0.31390519114781146, 0.44241114479004084, 0.0]
+ totalorder: [0.5575888552099592, 0.44241114479004084, 0.24368366406214775]
+
+Sampled truth Sobol Indices (# points 16000)
+***************************
+ firstorder: [0.31261591941512257, 0.441887746224135, -0.005810964687365922]
+ totalorder: [0.5623611180844434, 0.44201284296404386, 0.24465876318633062]
+
+Sampled Emulated Sobol Indices (# obs 300, noise var 0.01)
+***************************************************************
+(mean) firstorder: [0.33605548545490044, 0.41116050093679196, -0.0012213648484969539]
+(std) firstorder: [0.05909336956162543, 0.11484966121124164, 0.012908533302492602]
+(mean) totalorder: [0.5670345355855254, 0.4716028261179354, 0.24108222433317]
+(std) totalorder: [0.10619345801872732, 0.1041023777237331, 0.07200225781785778]
+
+```
+
diff --git a/docs/src/examples/emulators/lorenz_integrator_3d_3d.md b/docs/src/examples/emulators/lorenz_integrator_3d_3d.md new file mode 100644 index 00000000..735c363e --- /dev/null +++ b/docs/src/examples/emulators/lorenz_integrator_3d_3d.md @@ -0,0 +1,167 @@
+# Integrating Lorenz 63 with an emulated integrator
+
+In this example, we directly assess the performance of our machine learning emulators. The task is to learn the forward Euler integrator of a [Lorenz 63 system](https://en.wikipedia.org/wiki/Lorenz_system). The model parameters are set to their classical settings ``(\sigma, \rho, \beta) = (10,28,\frac{8}{3})`` to exhibit chaotic behavior.
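+In continuous time, the model is the system of ordinary differential equations
+```math
+\frac{dx}{dt} = \sigma(y - x), \qquad \frac{dy}{dt} = x(\rho - z) - y, \qquad \frac{dz}{dt} = xy - \beta z.
+```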
+The discrete system, obtained from a forward Euler discretization with timestep ``\Delta t``, is given as:
+
+```math
+\begin{aligned}
+x(t_{n+1}) &= x(t_n) + \Delta t(10(y(t_n) - x(t_n)))\\
+y(t_{n+1}) &= y(t_n) + \Delta t(x(t_n)(28 - z(t_n)) - y(t_n))\\
+z(t_{n+1}) &= z(t_n) + \Delta t(x(t_n)y(t_n) - \frac{8}{3}z(t_n))
+\end{aligned}
+```
+where ``t_n = n\Delta t``. The data consists of pairs ``\{ (x(t_k),y(t_k),z(t_k)),\ (x(t_{k+1}),y(t_{k+1}),z(t_{k+1})) + \eta_k\}`` for 600 values of ``k``, with each output subjected to independent, additive Gaussian noise ``\eta_k\sim N(0,\Gamma_y)``.
+
+We have several different emulator configurations in this example that the user can play around with. The goal of the emulator is that the posterior mean will approximate this discrete update map, or integrator, for any point on the Lorenz attractor from the sparse noisy data. To validate this, we recursively apply the trained emulator to the state, plotting the evolution of the trajectory and marginal statistics of the states over short and long timescales. We include a repeats option (`n_repeats`) to run the randomized training for multiple trials and illustrate the robustness of marginal statistics by plotting long-time marginal CDFs of the state.
+
+We will term the scalar-output Gaussian process emulator "GP", and the scalar- or vector-output random feature emulators "RF".
+
+## Walkthrough of the code
+
+We first import some standard packages
+```julia
+using Random, Distributions, LinearAlgebra # utilities
+using CairoMakie, ColorSchemes # for plots
+using JLD2 # for saved data
+```
+For the true integrator of the Lorenz system we import
+```julia
+using OrdinaryDiffEq
+```
+We import the relevant CES packages needed to define the emulators and kernel structures
+```julia
+using CalibrateEmulateSample.Emulators
+# Contains `Emulator`, `GaussianProcess`, `ScalarRandomFeatureInterface`, `VectorRandomFeatureInterface`
+# `GPJL`, `SeparableKernel`, `NonseparableKernel`, `OneDimFactor`, `LowRankFactor`, `DiagonalFactor`
+using CalibrateEmulateSample.DataContainers # Contains `PairedDataContainer`
+```
+and if one wants to play with optimizer options for the random feature emulators we import
+```julia
+using CalibrateEmulateSample.EnsembleKalmanProcesses # Contains `DataMisfitController`
+```
+
+We generate the truth data using `OrdinaryDiffEq`: the time series `sol` is used for training data, `sol_test` is used for plotting short-time trajectories, and `sol_hist` for plotting histograms of the state over long times (the `lorenz` right-hand side is written out at the top of the snippet for completeness):
+```julia
+# Lorenz 63 right-hand side with the classical parameter values, in the in-place form used by `ODEProblem`
+function lorenz(du, u, p, t)
+    du[1] = 10.0 * (u[2] - u[1])
+    du[2] = u[1] * (28.0 - u[3]) - u[2]
+    du[3] = u[1] * u[2] - (8 / 3) * u[3]
+end
+
+# Run L63 from 0 -> tmax
+u0 = [1.0; 0.0; 0.0]
+tmax = 20
+dt = 0.01
+tspan = (0.0, tmax)
+prob = ODEProblem(lorenz, u0, tspan)
+sol = solve(prob, Euler(), dt = dt)
+
+# Run L63 from end for test trajectory data
+tmax_test = 100
+tspan_test = (0.0, tmax_test)
+u0_test = sol.u[end]
+prob_test = ODEProblem(lorenz, u0_test, tspan_test)
+sol_test = solve(prob_test, Euler(), dt = dt)
+
+# Run L63 from end for histogram matching data
+tmax_hist = 1000
+tspan_hist = (0.0, tmax_hist)
+u0_hist = sol_test.u[end]
+prob_hist = ODEProblem(lorenz, u0_hist, tspan_hist)
+sol_hist = solve(prob_hist, Euler(), dt = dt)
+```
+
+We generate the training data from `sol` within `[tburn, tmax]`. The user has the option of how many training points to take (`n_train_pts`) and whether these are selected randomly or sequentially (`sample_rand`). The true outputs are perturbed by noise of variance `1e-4`, and pairs are stored in the compatible data format `PairedDataContainer`
+```julia
+rng = MersenneTwister(2024) # fixed-seed RNG, defined here for completeness (the seed is illustrative)
+tburn = 1 # NB works better with no spin-up!
+burnin = Int(floor(tburn / dt))
+n_train_pts = 600
+sample_rand = true
+if sample_rand
+    ind = Int.(shuffle!(rng, Vector(burnin:(tmax / dt - 1)))[1:n_train_pts])
+else
+    ind = burnin:(n_train_pts + burnin)
+end
+n_tp = length(ind)
+input = zeros(3, n_tp)
+output = zeros(3, n_tp)
+Γy = 1e-4 * I(3)
+noise = rand(rng, MvNormal(zeros(3), Γy), n_tp)
+for i in 1:n_tp
+    input[:, i] = sol.u[ind[i]]
+    output[:, i] = sol.u[ind[i] + 1] + noise[:, i]
+end
+iopairs = PairedDataContainer(input, output)
+```
+We have several cases that the user can play with,
+```julia
+cases = ["GP", "RF-scalar", "RF-scalar-diagin", "RF-svd-nonsep", "RF-nosvd-nonsep", "RF-nosvd-sep"]
+```
+Then, looping over the repeats, we first define some common hyperparameter optimization options for the `"RF-"` cases. In this case, the options are used primarily for diagnostics and acceleration (they are not required in general to solve this problem)
+```julia
+rf_optimizer_overrides = Dict(
+    "verbose" => true, # output diagnostics from the hyperparameter optimizer
+    "scheduler" => DataMisfitController(terminate_at = 1e4), # timestepping method for the optimizer
+    "cov_sample_multiplier" => 0.5, # 0.5*default number of samples to estimate covariances in optimizer
+    "n_features_opt" => 200, # number of features during hyperparameter optimization
+    "n_iteration" => 20, # iterations of the optimizer solver
+)
+```
+Then we build the machine learning tools. Here we highlight the scalar-output Gaussian process (`GP`), where we use the default squared-exponential kernel and learn a lengthscale hyperparameter in each input dimension. To handle multiple outputs, we use a decorrelation in the output space, and so we actually train three of these models.
+```julia
+gppackage = Emulators.GPJL() # use GaussianProcesses.jl
+pred_type = Emulators.YType() # predicted variances are for data not latent function
+mlt = GaussianProcess(
+    gppackage;
+    prediction_type = pred_type,
+    noise_learn = false, # do not additionally learn a white kernel
+)
+```
+We also highlight the vector random feature emulator with a nonseparable kernel (`RF-nosvd-nonsep`), which can natively handle multiple outputs without decorrelation of the output space. This kernel is a rank-3 representation with a small nugget term.
+```julia
+nugget = 1e-12
+kernel_structure = NonseparableKernel(LowRankFactor(3, nugget))
+n_features = 500 # number of features for prediction
+mlt = VectorRandomFeatureInterface(
+    n_features,
+    3, # input dimension
+    3, # output dimension
+    rng = rng, # pin random number generator
+    kernel_structure = kernel_structure,
+    optimizer_options = rf_optimizer_overrides,
+)
+```
+With the machine learning tools specified, we build the emulator object
+```julia
+# decorrelate = true for `GP`
+# decorrelate = false for `RF-nosvd-nonsep`
+emulator = Emulator(mlt, iopairs; obs_noise_cov = Γy, decorrelate = decorrelate)
+optimize_hyperparameters!(emulator) # some GP packages require this additional call
+```
+
+## Plots
+
+We evolve the state with the trained emulator mean over the short-timescale validation trajectory
+```julia
+u_test_tmp = zeros(3, length(xspan_test)) # xspan_test: time points of the test trajectory (from the example script)
+u_test_tmp[:, 1] = sol_test.u[1] # initialize at the final-time solution of the training period
+
+for i in 1:(length(xspan_test) - 1)
+    rf_mean, _ = predict(emulator, u_test_tmp[:, i:i], transform_to_real = true) # 3x1 matrix
+    u_test_tmp[:, i + 1] = rf_mean
+end
+```
+The other trajectories are computed similarly. We then produce the following plots.
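+For the long-time marginal statistics, the state is evolved recursively in the same way over the histogram time span, and we compare empirical marginal CDFs of each state component. A minimal sketch of the CDF construction, assuming (as in the data saved by the example script) that `solhist` and `uhist` hold the true and emulated long-time states as `3 x N` arrays:
+```julia
+# sorting each state component over the long trajectory gives the empirical CDF:
+# plot the sorted values against the heights (1:N)/N
+sol_cdf = sort(solhist, dims = 2)
+u_cdf = sort(uhist, dims = 2)
+cdf_heights = (1:size(sol_cdf, 2)) ./ size(sol_cdf, 2)
+```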
In all figures, results from evolving the state with the true integrator are shown in orange, and results from the emulated integrators are shown in blue.
+
+### Gaussian Process Emulator (GaussianProcesses.jl: `GP`)
+For one example fit
+```@raw html
+
+
+```
+
+### Random Feature Emulator (`RF-nosvd-nonsep`)
+For one example fit
+```@raw html
+
+
+```
+
+and here are CDFs over 20 randomized trials of the random feature hyperparameter optimization
+```@raw html
+
+```
+
diff --git a/docs/src/examples/emulators/regression_2d_2d.md b/docs/src/examples/emulators/regression_2d_2d.md
new file mode 100644
index 00000000..52a98d36
--- /dev/null
+++ b/docs/src/examples/emulators/regression_2d_2d.md
@@ -0,0 +1,162 @@
+# Regression of ``\mathbb{R}^2 \to \mathbb{R}^2`` smooth function
+
+In this example, we directly assess the performance of our machine learning emulators. The task is to learn the function:
+
+```math
+G\colon [0,2\pi]^2 \to \mathbb{R}^2, \quad G(x_1,x_2) = (\sin(x_1) + \cos(x_2), \sin(x_1) - \cos(x_2))
+```
+observed at 150 points, subject to additive (and possibly correlated) Gaussian noise ``N(0,\Sigma)``.
+
+We have several different emulator configurations in this example that the user can play around with. The goal of this example is to predict the function (i.e., the posterior mean) and its uncertainty (i.e., the posterior pointwise variance) on a ``200\times200`` grid, providing a mean squared error between the emulated and true function; with `plot_flag = true` we also save several plots.
+
+Henceforth, we will refer to the scalar-output Gaussian process emulator as "GP", the scalar-output random feature emulator as "scalar RF", and the vector-output random feature emulator as "vector RF".
+
+## Walkthrough of the code
+
+We first import some standard packages
+```julia
+using Random
+using StableRNGs
+using Distributions
+using Statistics
+using LinearAlgebra
+```
+and the CES packages needed to define the emulators and kernel structures
+```julia
+using CalibrateEmulateSample.Emulators
+# Contains `Emulator`, `GaussianProcess`, `ScalarRandomFeatureInterface`, `VectorRandomFeatureInterface`
+# `GPJL`, `SKLJL`, `SeparableKernel`, `NonseparableKernel`, `OneDimFactor`, `LowRankFactor`, `DiagonalFactor`
+using CalibrateEmulateSample.DataContainers # Contains `PairedDataContainer`
+```
+To play with the hyperparameter optimization of RF, the optimizer options sometimes require `EnsembleKalmanProcesses.jl` structures, so we load this too
+```julia
+using CalibrateEmulateSample.EnsembleKalmanProcesses # Contains `DataMisfitController`
+```
+We have 9 cases that the user can toggle or customize
+```julia
+cases = [
+    "gp-skljl",
+    "gp-gpjl", # Very slow prediction...
+    "rf-scalar",
+    "rf-svd-diag",
+    "rf-svd-nondiag",
+    "rf-nosvd-diag",
+    "rf-nosvd-nondiag",
+    "rf-svd-nonsep",
+    "rf-nosvd-nonsep",
+]
+```
+The first two are for GP, with either the `ScikitLearn.jl` or the `GaussianProcesses.jl` interface. The third is for the scalar RF interface, which most closely acts as a drop-in replacement for a GP. The rest are examples of vector RF with different types of data processing (svd = same processing as scalar RF, nosvd = unprocessed) and different RF kernel structures in the output space, of increasing complexity/flexibility (diag = separable diagonal, nondiag = separable nondiagonal, nonsep = nonseparable nondiagonal).
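+The validation step below evaluates the emulators on a regular grid; for this, the example scripts define a small `meshgrid` helper, reproduced here:
+```julia
+# return matrices of grid coordinates built from two coordinate vectors
+function meshgrid(vx::AbstractVector{T}, vy::AbstractVector{T}) where {T}
+    m, n = length(vy), length(vx)
+    gx = reshape(repeat(vx, inner = m, outer = 1), m, n)
+    gy = reshape(repeat(vy, inner = 1, outer = n), m, n)
+    return gx, gy
+end
+```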
+
+We set up the learning problem specification, defining the input and output dimensions, the number of training points, the function ``G``, and the perturbed samples `Y` with correlated additive noise
+```julia
+n = 150 # number of training points
+p = 2 # input dim
+d = 2 # output dim
+X = 2.0 * π * rand(p, n)
+# G(x1, x2)
+g1x = sin.(X[1, :]) .+ cos.(X[2, :])
+g2x = sin.(X[1, :]) .- cos.(X[2, :])
+gx = zeros(2, n)
+gx[1, :] = g1x
+gx[2, :] = g2x
+# Add noise η
+μ = zeros(d)
+Σ = 0.1 * [[0.8, 0.1] [0.1, 0.5]] # d x d
+noise_samples = rand(MvNormal(μ, Σ), n)
+# y = G(x) + η
+Y = gx .+ noise_samples
+```
+We then store these in a paired data container, which standardizes how the data are read
+```julia
+iopairs = PairedDataContainer(X, Y, data_are_columns = true)
+```
+We define some common settings for all emulators, e.g., the number of random features to use, and some hyperparameter optimizer options
+```julia
+# common Gaussian feature setup
+pred_type = YType()
+
+# common random feature setup
+n_features = 150
+optimizer_options = Dict("n_iteration" => 10, "scheduler" => DataMisfitController(on_terminate = "continue"))
+nugget = 1e-12
+```
+We then build the emulators. An example for GP (`gp-skljl`)
+```julia
+# use scikit learn
+gppackage = SKLJL()
+# build a GP that learns an additional white noise kernel (along with the default RBF kernel)
+gaussian_process = GaussianProcess(gppackage, noise_learn = true)
+# the data processing normalizes input data, and decorrelates output data with information from Σ
+emulator = Emulator(gaussian_process, iopairs, obs_noise_cov = Σ, normalize_inputs = true)
+```
+An example for scalar RF (`rf-scalar`)
+```julia
+# build a scalar RF with a rank-2 kernel in input space (placeholder 1D kernel in output space) and use the optimizer options during training
+srfi = ScalarRandomFeatureInterface(
+    n_features,
+    p,
+    kernel_structure = SeparableKernel(LowRankFactor(2, nugget), OneDimFactor()),
+    optimizer_options = optimizer_options,
+)
+# the data processing normalizes input data, and decorrelates output data with information from Σ
+emulator = Emulator(srfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true)
+```
+An example for vector RF (`rf-nosvd-nonsep`)
+```julia
+# build a vector RF with a rank-4 nonseparable kernel and use the optimizer options during training
+vrfi = VectorRandomFeatureInterface(
+    n_features,
+    p,
+    d, # additionally provide the output dimension
+    kernel_structure = NonseparableKernel(LowRankFactor(4, nugget)),
+    optimizer_options = optimizer_options,
+)
+# the data processing normalizes input data, and does not decorrelate outputs
+emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true, decorrelate = false)
+```
+For RF and some GP packages, the training occurs during construction of the `Emulator`; for the other packages, one must additionally call an optimization step afterwards
+```julia
+optimize_hyperparameters!(emulator)
+```
+## Validation and Plots
+
+We create an evaluation grid for our models (using the `meshgrid` helper above), with inputs stored as columns:
+```julia
+n_pts = 200
+x1 = range(0.0, stop = 2 * π, length = n_pts)
+x2 = range(0.0, stop = 2 * π, length = n_pts)
+X1, X2 = meshgrid(x1, x2)
+inputs = permutedims(hcat(X1[:], X2[:]), (2, 1))
+```
+We predict with the emulators at the new inputs, and `transform_to_real` inverts the data processing back to physical values
+```julia
+em_mean, em_cov = predict(emulator, inputs, transform_to_real = true)
+```
+We then plot the predicted means and pointwise variances, and calculate the errors for
the three highlighted cases: + +### Gaussian Process Emulator (Sci-kit learn: `gp-skljl`) +``` +L^2 error of mean and latent truth:0.0008042391077774167 +``` +```@raw html + + +``` +### Random Feature Emulator (`rf-scalar`) +``` +L^2 error of mean and latent truth:0.0012253119679379056 +``` + +```@raw html + + +``` +### Random Feature Emulator (vector: `rf-nosvd-nonsep`) +``` +L^2 error of mean and latent truth:0.0011094292509180393 +``` + +```@raw html + + +``` \ No newline at end of file diff --git a/examples/Emulator/GaussianProcess/Project.toml b/examples/Emulator/GaussianProcess/Project.toml deleted file mode 100644 index faaeb690..00000000 --- a/examples/Emulator/GaussianProcess/Project.toml +++ /dev/null @@ -1,14 +0,0 @@ -[deps] -CalibrateEmulateSample = "95e48a1f-0bec-4818-9538-3db4340308e3" -Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" -Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f" -FiniteDiff = "6a86dc24-6348-571c-b903-95158fe2bd41" -LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" -Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80" -PyPlot = "d330b81b-6aea-500a-939a-2ce795aea3ee" -Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" -Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[compat] -FiniteDiff = "~2.10" -julia = "1.6" diff --git a/examples/Emulator/GaussianProcess/learn_noise.jl b/examples/Emulator/GaussianProcess/learn_noise.jl deleted file mode 100644 index 1f02a7e0..00000000 --- a/examples/Emulator/GaussianProcess/learn_noise.jl +++ /dev/null @@ -1,161 +0,0 @@ -# Reference the in-tree version of CalibrateEmulateSample on Julias load path - -# Import modules -using Random -using Distributions -using Statistics -using LinearAlgebra -using CalibrateEmulateSample.Emulators -using CalibrateEmulateSample.DataContainers - -############################################################################### -# # -# This examples shows how to fit a Gaussian Process (GP) regression model # -# using Emulator/GaussianProcess and demonstrates how the GaussianProcess's built-in Singular # -# Value Decomposition (SVD) decorrelates the training data and maps into # -# a space where the covariance of the observational noise is the identity. # -# # -# When training a GP, the hyperparameters of its kernel (or covariance # -# function) are optimized with respect to the given input-output pairs. # -# When training a separate GP for each output component (as is usually # -# done for multidimensional output, rather than fitting one multi-output # -# model), one assumes that their covariance functions are independent. # -# The SVD transformation ensures that this is a valid assumption. # -# # -# The decorrelation by the SVD is done automatically when instantiating # -# a `GaussianProcess`, but the user can choose to have the `predict` function # -# return its predictions in the original space (by setting # -# transform_to_real=true) # -# # -# Training points: {(x_i, y_i)} (i=1,...,n), where x_i ∈ ℝ ² and y_i ∈ ℝ ² # -# The y_i are assumed to be related to the x_i by: # -# y_i = G(x_i) + η, where G is a map from ℝ ² to ℝ ², and η is noise drawn # -# from a 2D normal distribution with known mean μ and covariance Σ # -# Two Gaussian Process models are fit, one that predicts y_i[1] from x_i # -# and one that predicts y_i[2] from x_i. # -# # -# The GaussianProcess module can be used as a standalone module to fit # -# Gaussian Process regression models, but it was originally designed as # -# the "Emulate" step of the "Calibrate-Emulate-Sample" framework # -# developed at CliMA. 
# -# Further reading on Calibrate-Emulate-Sample: # -# Cleary et al., 2020 # -# https://arxiv.org/abs/2001.03689 # -# # -############################################################################### - -rng_seed = 41 -Random.seed!(rng_seed) - -gppackage = GPJL() -prediction_type = YType() -gauss_proc_1 = GaussianProcess( - gppackage; - kernel = nothing, # use default squared exponential kernel - prediction_type = prediction_type, - noise_learn = true, -) - -# Generate training data (x-y pairs, where x ∈ ℝ ᵖ, y ∈ ℝ ᵈ) -# x = [x1, x2]: inputs/predictors/features/parameters -# y = [y1, y2]: outputs/predictands/targets/observables -# The observables y are related to the parameters x by: -# y = G(x1, x2) + η, -# where G(x1, x2) := [sin(x1) + cos(x2), sin(x1) - cos(x2)], and η ~ N(0, Σ) -n = 200 # number of training points -p = 2 # input dim -d = 2 # output dim - -X = 2.0 * π * rand(p, n) - -# G(x1, x2) -g1x = sin.(X[1, :]) .+ cos.(X[2, :]) -g2x = sin.(X[1, :]) .- cos.(X[2, :]) -gx = zeros(2, n) -gx[1, :] = g1x -gx[2, :] = g2x -# Add noise η -μ = zeros(d) -Σ = 0.1 * [[0.8, 0.2] [0.2, 0.5]] # d x d -noise_samples = rand(MvNormal(μ, Σ), n) - -# y = G(x) + η -Y = gx .+ noise_samples - -# Fit 2D Gaussian Process regression model -# (To be precise: We fit two models, one that predicts y1 from x1 and x2, and -# one that predicts y2 from x1 and x2) -# Setting GPkernel=nothing leads to the creation of a default squared -# exponential kernel. -# Setting noise_learn=true leads to the addition of white noise to the kernel, -# whose hyperparameter -- the signal variance, or "noise" -- will be learned -# in the training phase via an optimization procedure. -# Because of the SVD transformation applied to the output, we expect the -# learned noise to be close to 1. -iopairs = PairedDataContainer(X, Y, data_are_columns = true) -@assert get_inputs(iopairs) == X -@assert get_outputs(iopairs) == Y - -emulator_1 = Emulator(gauss_proc_1, iopairs, obs_noise_cov = Σ, normalize_inputs = true) -println("build GP with ", n, " training points") -#optimize the hyperparameters to best fit the GP. -optimize_hyperparameters!(emulator_1) -println("GP trained") - -# gpobj1 = GaussianProcess(iopairs, gppackage, GPkernel=nothing, obs_noise_cov=Σ, -# normalized=true, noise_learn=true, prediction_type=pred_type) - -println("\n-----------") -println("Results of training Gaussian Process models with noise_learn=true") -println("-----------") - -println("\nKernel of the GP trained to predict y1 from x=(x1, x2):") -println(emulator_1.machine_learning_tool.models[1].kernel) -println("Learned noise parameter, σ_1:") -learned_σ_1 = sqrt(emulator_1.machine_learning_tool.models[1].kernel.kright.σ2) -println("σ_1 = $learned_σ_1") -# Check if the learned noise is approximately 1 -@assert(isapprox(learned_σ_1, 1.0; atol = 0.1)) - -println("\nKernel of the GP trained to predict y2 from x=(x1, x2):") -println(emulator_1.machine_learning_tool.models[2].kernel) -println("Learned noise parameter, σ_2:") -learned_σ_2 = sqrt(emulator_1.machine_learning_tool.models[2].kernel.kright.σ2) -println("σ_2 = $learned_σ_2") -# Check if the learned noise is approximately 1 -@assert(isapprox(learned_σ_2, 1.0; atol = 0.1)) -println("------------------------------------------------------------------\n") - -# For comparison: When noise_learn is set to false, the observational noise -# is set to 1.0 and is not learned/optimized during the training. But thanks -# to the SVD, 1.0 is the correct value to use. 
-gauss_proc_2 = GaussianProcess( - gppackage; - kernel = nothing, # use default squared exponential kernel - prediction_type = prediction_type, - noise_learn = false, -) -emulator_2 = Emulator(gauss_proc_2, iopairs, obs_noise_cov = Σ, normalize_inputs = true) -println("build GP with ", n, " training points") -#optimize the hyperparameters to best fit the GP. -optimize_hyperparameters!(emulator_2) -println("GP trained") - -println("\n-----------") -println("Results of training Gaussian Process models with noise_learn=false") -println("-----------") - -println("\nKernel of the GP trained to predict y1 from x=(x1, x2):") -# Note: In contrast to the kernels of the gpobj1 models, these ones do not -# have a white noise ("Noise") kernel component -println(emulator_2.machine_learning_tool.models[1].kernel) -# logNoise is given as log(sqrt(noise)) -obs_noise_1 = exp(emulator_2.machine_learning_tool.models[1].logNoise.value^2) -println("Observational noise: $obs_noise_1") - -println("\nKernel of the GP trained to predict y1 from x=(x1, x2):") -println(emulator_2.machine_learning_tool.models[2].kernel) -# logNoise is given as log(sqrt(noise)) -obs_noise_2 = exp(emulator_2.machine_learning_tool.models[2].logNoise.value^2) -println("Observational noise: $obs_noise_2") -println("------------------------------------------------------------------") diff --git a/examples/Emulator/GaussianProcess/plot_GP.jl b/examples/Emulator/GaussianProcess/plot_GP.jl deleted file mode 100644 index 721382ce..00000000 --- a/examples/Emulator/GaussianProcess/plot_GP.jl +++ /dev/null @@ -1,316 +0,0 @@ -# Reference the in-tree version of CalibrateEmulateSample on Julias load path -include(joinpath(@__DIR__, "..", "..", "ci", "linkfig.jl")) - -# Import modules -using Random -using Distributions -using Statistics -using LinearAlgebra -using CalibrateEmulateSample.Emulators -using CalibrateEmulateSample.DataContainers - -plot_flag = true -if plot_flag - ENV["GKSwstype"] = "100" - using Plots - gr(size = (1500, 700)) - Plots.scalefontsizes(1.3) - font = Plots.font("Helvetica", 18) - fontdict = Dict(:guidefont => font, :xtickfont => font, :ytickfont => font, :legendfont => font) - -end - -############################################################################### -# # -# This examples shows how to fit a Gaussian Process regression model # -# using GaussianProcess, and how to plot the mean and variance # -# # -# Training points: {(x_i, y_i)} (i=1,...,n), where x_i ∈ ℝ ² and y_i ∈ ℝ ² # -# The y_i are assumed to be related to the x_i by: # -# y_i = G(x_i) + η, where G is a map from ℝ ² to ℝ ², and η is noise drawn # -# from a 2D normal distribution with known mean μ and covariance Σ # -# # -# Two Gaussian Process models are fit, one that predicts y_i[1] from x_i # -# and one that predicts y_i[2] from x_i. Fitting separate models for # -# y_i[1] and y_i[2] requires the outputs to be independent - since this # -# cannot be assumed to be the case a priori, the training data are # -# decorrelated by performing a Singular Value Decomposition (SVD) on the # -# noise covariance Σ, and each model is then trained in the decorrelated # -# space. 
# -# # -# The decorrelation is done automatically when instantiating a `GaussianProcess`, # -# but the user can choose to have the `predict` function return its # -# predictions in the original space (by setting transform_to_real=true) # -# # -# The GaussianProcessEmulator module can be used as a standalone module to fit # -# Gaussian Process regression models, but it was originally designed as # -# the "Emulate" step of the "Calibrate-Emulate-Sample" framework # -# developed at CliMA. # -# Further reading on Calibrate-Emulate-Sample: # -# Cleary et al., 2020 # -# https://arxiv.org/abs/2001.03689 # -# # -############################################################################### - -function meshgrid(vx::AbstractVector{T}, vy::AbstractVector{T}) where {T} - m, n = length(vy), length(vx) - gx = reshape(repeat(vx, inner = m, outer = 1), m, n) - gy = reshape(repeat(vy, inner = 1, outer = n), m, n) - - return gx, gy -end - -rng_seed = 41 -Random.seed!(rng_seed) - -output_directory = joinpath(@__DIR__, "output") -if !isdir(output_directory) - mkdir(output_directory) -end - -#create the machine learning tools: Gaussian Process -gppackage = SKLJL() -pred_type = YType() -gaussian_process = GaussianProcess(gppackage, noise_learn = false) - -# Generate training data (x-y pairs, where x ∈ ℝ ᵖ, y ∈ ℝ ᵈ) -# x = [x1, x2]: inputs/predictors/features/parameters -# y = [y1, y2]: outputs/predictands/targets/observables -# The observables y are related to the parameters x by: -# y = G(x1, x2) + η, -# where G(x1, x2) := [sin(x1) + cos(x2), sin(x1) - cos(x2)], and η ~ N(0, Σ) -n = 150 # number of training points -p = 2 # input dim -d = 2 # output dim - -X = 2.0 * π * rand(p, n) -# G(x1, x2) -g1x = sin.(X[1, :]) .+ cos.(X[2, :]) -g2x = sin.(X[1, :]) .- cos.(X[2, :]) -gx = zeros(2, n) -gx[1, :] = g1x -gx[2, :] = g2x - -# Add noise η -μ = zeros(d) -Σ = 0.1 * [[0.8, 0.1] [0.1, 0.5]] # d x d -noise_samples = rand(MvNormal(μ, Σ), n) -# y = G(x) + η -Y = gx .+ noise_samples - - -#plot training data with and without noise -if plot_flag - p1 = plot( - X[1, :], - X[2, :], - g1x, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - - figpath = joinpath(output_directory, "GP_test_observed_y1nonoise.png") - savefig(figpath) - - p2 = plot( - X[1, :], - X[2, :], - g2x, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "GP_test_observed_y2nonoise.png") - savefig(figpath) - - p3 = plot( - X[1, :], - X[2, :], - Y[1, :], - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "GP_test_observed_y1.png") - savefig(figpath) - - p4 = plot( - X[1, :], - X[2, :], - Y[2, :], - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "GP_test_observed_y2.png") - savefig(figpath) - -end - -iopairs = PairedDataContainer(X, Y, data_are_columns = true) -@assert get_inputs(iopairs) == X -@assert get_outputs(iopairs) == Y - -# Create the emulator class, and build the 2D Gaussian Process regression model -# (To be precise: We fit two models, one that predicts y1 from x1 and x2, -# and one that predicts y2 from x1 and x2) -# Setting GPkernel=nothing leads to the creation of a default squared -# exponential kernel. 
-# Setting noise_learn=true leads to the addition of white noise to the -# kernel -emulator = Emulator(gaussian_process, iopairs, obs_noise_cov = Σ, normalize_inputs = false) -println("build GP with ", n, " training points") - -#optimize the hyperparameters to best fit the GP. -optimize_hyperparameters!(emulator) -println("GP trained") - -#gpobj = GaussianProcess(iopairs, gppackage, GPkernel=nothing, obs_noise_cov=Σ, -# normalized=true, noise_learn=true, prediction_type=pred_type) - -# Plot mean and variance of the predicted observables y1 and y2 -# For this, we generate test points on a x1-x2 grid. -n_pts = 200 -x1 = range(0.0, stop = (4.0 / 5.0) * 2 * π, length = n_pts) -x2 = range(0.0, stop = (4.0 / 5.0) * 2 * π, length = n_pts) -X1, X2 = meshgrid(x1, x2) -# Input for predict has to be of size N_samples x input_dim -inputs = permutedims(hcat(X1[:], X2[:]), (2, 1)) - -#gp_mean, gp_cov = GaussianProcessEmulator.predict(gpobj, -# inputs, -# transform_to_real=true) -# Predict on the grid points (note that `predict` returns the full -# covariance matrices, not just the variance -- gp_cov is a vector -# of covariance matrices) - -gp_mean, gp_cov = predict(emulator, inputs, transform_to_real = true) -println("end predictions at ", n_pts * n_pts, " points") - - -#plot predictions -for y_i in 1:d - gp_var_temp = [diag(gp_cov[j]) for j in 1:length(gp_cov)] # (40000,) - gp_var = permutedims(vcat([x' for x in gp_var_temp]...), (2, 1)) # 2 x 40000 - - mean_grid = reshape(gp_mean[y_i, :], n_pts, n_pts) # 2 x 40000 - if plot_flag - p5 = plot( - x1, - x2, - mean_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "mean of y" * string(y_i), - zguidefontrotation = 90, - ) - end - var_grid = reshape(gp_var[y_i, :], n_pts, n_pts) - if plot_flag - p6 = plot( - x1, - x2, - var_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "var of y" * string(y_i), - zguidefontrotation = 90, - ) - - plot(p5, p6, layout = (1, 2), legend = false) - - savefig(joinpath(output_directory, "GP_test_y" * string(y_i) * "_predictions.png")) - end -end - -# Plot the true components of G(x1, x2) -g1_true = sin.(inputs[1, :]) .+ cos.(inputs[2, :]) -g1_true_grid = reshape(g1_true, n_pts, n_pts) -if plot_flag - p7 = plot( - x1, - x2, - g1_true_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "sin(x1) + cos(x2)", - zguidefontrotation = 90, - ) - savefig(joinpath(output_directory, "GP_test_true_g1.png")) -end - -g2_true = sin.(inputs[1, :]) .- cos.(inputs[2, :]) -g2_true_grid = reshape(g2_true, n_pts, n_pts) -if plot_flag - p8 = plot( - x1, - x2, - g2_true_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "sin(x1) - cos(x2)", - zguidefontrotation = 90, - ) - g_true_grids = [g1_true_grid, g2_true_grid] - - savefig(joinpath(output_directory, "GP_test_true_g2.png")) - -end - -# Plot the difference between the truth and the mean of the predictions -for y_i in 1:d - - # Reshape gp_cov to size N_samples x output_dim - gp_var_temp = [diag(gp_cov[j]) for j in 1:length(gp_cov)] # (40000,) - gp_var = permutedims(vcat([x' for x in gp_var_temp]...), (2, 1)) # 40000 x 2 - - mean_grid = reshape(gp_mean[y_i, :], n_pts, n_pts) - var_grid = reshape(gp_var[y_i, :], n_pts, n_pts) - # Compute and plot 1/variance * (truth - prediction)^2 - - if plot_flag - zlabel = "1/var * (true_y" * string(y_i) * " - predicted_y" * string(y_i) * ")^2" - - p9 
= plot( - x1, - x2, - sqrt.(1.0 ./ var_grid .* (g_true_grids[y_i] .- mean_grid) .^ 2), - st = :surface, - camera = (30, 60), - c = :magma, - zlabel = zlabel, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - - savefig(joinpath(output_directory, "GP_test_y" * string(y_i) * "_difference_truth_prediction.png")) - end -end - -#Plots.scalefontsizes(1/1.3) diff --git a/examples/Emulator/Ishigami/emulate.jl b/examples/Emulator/Ishigami/emulate.jl index b152a960..ae367270 100644 --- a/examples/Emulator/Ishigami/emulate.jl +++ b/examples/Emulator/Ishigami/emulate.jl @@ -21,6 +21,13 @@ function ishigami(x::MM; a = 7.0, b = 0.05) where {MM <: AbstractMatrix} return (1 .+ b * x[3,:].^4) * sin.(x[1,:]) + a * sin.(x[2,:]).^2 end =# + +output_directory = joinpath(@__DIR__, "output") +if !isdir(output_directory) + mkdir(output_directory) +end + + function main() rng = MersenneTwister(seed) @@ -53,8 +60,8 @@ function main() scatter!(axy, samples[:, 2], y[:], color = :orange) scatter!(axz, samples[:, 3], y[:], color = :orange) - save("ishigami_slices_truth.png", f1, px_per_unit = 3) - save("ishigami_slices_truth.pdf", f1, px_per_unit = 3) + save(joinpath(output_directory, "ishigami_slices_truth.png"), f1, px_per_unit = 3) + save(joinpath(output_directory, "ishigami_slices_truth.pdf"), f1, px_per_unit = 3) n_train_pts = 300 ind = shuffle!(rng, Vector(1:n_data))[1:n_train_pts] @@ -75,12 +82,8 @@ function main() decorrelate = true nugget = Float64(1e-12) - overrides = Dict( - "scheduler" => DataMisfitController(terminate_at = 1e4), - "cov_sample_multiplier" => 1.0, - "n_features_opt" => 100, - "n_iteration" => 10, - ) + overrides = + Dict("verbose" => true, "scheduler" => DataMisfitController(terminate_at = 1e4), "n_features_opt" => 200) if case == "Prior" # don't do anything overrides["n_iteration"] = 0 @@ -94,14 +97,9 @@ function main() # Build ML tools if case == "GP" - gppackage = Emulators.GPJL() + gppackage = Emulators.SKLJL() pred_type = Emulators.YType() - mlt = GaussianProcess( - gppackage; - kernel = nothing, # use default squared exponential kernel - prediction_type = pred_type, - noise_learn = false, - ) + mlt = GaussianProcess(gppackage; prediction_type = pred_type, noise_learn = false) elseif case ∈ ["RF-scalar", "Prior"] @@ -112,7 +110,7 @@ function main() 3, rng = rng, kernel_structure = kernel_structure, - optimizer_options = overrides, + optimizer_options = deepcopy(overrides), ) end @@ -171,7 +169,6 @@ function main() println("(std) totalorder: ", totalorder_std) end - # plots f2 = Figure(resolution = (1.618 * 900, 300), markersize = 4) @@ -185,8 +182,8 @@ function main() scatter!(axy_em, samples[ind, 2], y[ind] + noise, color = :red, markersize = 8) scatter!(axz_em, samples[ind, 3], y[ind] + noise, color = :red, markersize = 8) - save("ishigami_slices_$(case).png", f2, px_per_unit = 3) - save("ishigami_slices_$(case).pdf", f2, px_per_unit = 3) + save(joinpath(output_directory, "ishigami_slices_$(case).png"), f2, px_per_unit = 3) + save(joinpath(output_directory, "ishigami_slices_$(case).pdf"), f2, px_per_unit = 3) end diff --git a/examples/Emulator/L63/emulate.jl b/examples/Emulator/L63/emulate.jl index 4eda6217..d3af3eb7 100644 --- a/examples/Emulator/L63/emulate.jl +++ b/examples/Emulator/L63/emulate.jl @@ -5,13 +5,9 @@ using CairoMakie, ColorSchemes #for plots using JLD2 # CES -using CalibrateEmulateSample using CalibrateEmulateSample.Emulators -using CalibrateEmulateSample.ParameterDistributions using CalibrateEmulateSample.EnsembleKalmanProcesses using 
CalibrateEmulateSample.DataContainers -EKP = CalibrateEmulateSample.EnsembleKalmanProcesses - function lorenz(du, u, p, t) du[1] = 10.0 * (u[2] - u[1]) @@ -21,12 +17,19 @@ end function main() + output_directory = joinpath(@__DIR__, "output") + if !isdir(output_directory) + mkdir(output_directory) + end + # rng rng = MersenneTwister(1232434) - n_repeats = 1 # repeat exp with same data. + n_repeats = 20 # repeat exp with same data. println("run experiment $n_repeats times") + + # Run L63 from 0 -> tmax u0 = [1.0; 0.0; 0.0] tmax = 20 @@ -35,7 +38,7 @@ function main() prob = ODEProblem(lorenz, u0, tspan) sol = solve(prob, Euler(), dt = dt) - # Run L63 from end for test trajetory data + # Run L63 from end for test trajectory data tmax_test = 100 tspan_test = (0.0, tmax_test) u0_test = sol.u[end] @@ -67,7 +70,7 @@ function main() # Create training pairs (with noise) from subsampling [burnin,tmax] tburn = 1 # NB works better with no spin-up! burnin = Int(floor(tburn / dt)) - n_train_pts = 600 #600 + n_train_pts = 600 sample_rand = true if sample_rand ind = Int.(shuffle!(rng, Vector(burnin:(tmax / dt - 1)))[1:n_train_pts]) @@ -87,37 +90,29 @@ function main() # Emulate - cases = - ["GP", "RF-scalar", "RF-scalar-diagin", "RF-vector-svd-nonsep", "RF-vector-nosvd-nonsep", "RF-vector-nosvd-sep"] + cases = ["GP", "RF-scalar", "RF-scalar-diagin", "RF-svd-nonsep", "RF-nosvd-nonsep", "RF-nosvd-sep"] + + case = cases[1] - case = cases[5] - decorrelate = true nugget = Float64(1e-12) u_test = [] u_hist = [] train_err = [] for rep_idx in 1:n_repeats - overrides = Dict( - "verbose" => true, + rf_optimizer_overrides = Dict( "scheduler" => DataMisfitController(terminate_at = 1e4), "cov_sample_multiplier" => 0.5, - "n_features_opt" => 200, - "n_iteration" => 20, - "accelerator" => NesterovAccelerator(), + "n_features_opt" => 400, + "n_iteration" => 30, + "accelerator" => ConstantStepNesterovAccelerator(), ) # Build ML tools if case == "GP" gppackage = Emulators.GPJL() pred_type = Emulators.YType() - mlt = GaussianProcess( - gppackage; - kernel = nothing, # use default squared exponential kernel - prediction_type = pred_type, - noise_learn = false, - ) - + mlt = GaussianProcess(gppackage; prediction_type = pred_type, noise_learn = false) elseif case ∈ ["RF-scalar", "RF-scalar-diagin"] n_features = 10 * Int(floor(sqrt(3 * n_tp))) kernel_structure = @@ -128,9 +123,9 @@ function main() 3, rng = rng, kernel_structure = kernel_structure, - optimizer_options = overrides, + optimizer_options = rf_optimizer_overrides, ) - elseif case ∈ ["RF-vector-svd-nonsep"] + elseif case ∈ ["RF-svd-nonsep"] kernel_structure = NonseparableKernel(LowRankFactor(6, nugget)) n_features = 500 @@ -140,35 +135,38 @@ function main() 3, rng = rng, kernel_structure = kernel_structure, - optimizer_options = overrides, + optimizer_options = rf_optimizer_overrides, ) - elseif case ∈ ["RF-vector-nosvd-nonsep"] - kernel_structure = NonseparableKernel(LowRankFactor(3, nugget)) + elseif case ∈ ["RF-nosvd-nonsep"] + kernel_structure = NonseparableKernel(LowRankFactor(4, nugget)) n_features = 500 - decorrelate = false # don't do SVD mlt = VectorRandomFeatureInterface( n_features, 3, 3, rng = rng, kernel_structure = kernel_structure, - optimizer_options = overrides, + optimizer_options = rf_optimizer_overrides, ) - elseif case ∈ ["RF-vector-nosvd-sep"] + elseif case ∈ ["RF-nosvd-sep"] kernel_structure = SeparableKernel(LowRankFactor(3, nugget), LowRankFactor(3, nugget)) n_features = 500 - decorrelate = false # don't do SVD mlt = 
VectorRandomFeatureInterface( n_features, 3, 3, rng = rng, kernel_structure = kernel_structure, - optimizer_options = overrides, + optimizer_options = rf_optimizer_overrides, ) end # Emulate + if case ∈ ["RF-nosvd-nonsep", "RF-nosvd-sep"] + decorrelate = false + else + decorrelate = true + end emulator = Emulator(mlt, iopairs; obs_noise_cov = Γy, decorrelate = decorrelate) optimize_hyperparameters!(emulator) @@ -220,8 +218,8 @@ function main() current_figure() # save - save("l63_test.png", f, px_per_unit = 3) - save("l63_test.pdf", f, pt_per_unit = 3) + save(joinpath(output_directory, case * "_l63_test.png"), f, px_per_unit = 3) + save(joinpath(output_directory, case * "_l63_test.pdf"), f, pt_per_unit = 3) # plot attractor f3 = Figure() @@ -229,8 +227,8 @@ function main() lines(f3[2, 1], solplot[1, :], solplot[3, :], color = :orange) # save - save("l63_attr.png", f3, px_per_unit = 3) - save("l63_attr.pdf", f3, pt_per_unit = 3) + save(joinpath(output_directory, case * "_l63_attr.png"), f3, px_per_unit = 3) + save(joinpath(output_directory, case * "_l63_attr.pdf"), f3, pt_per_unit = 3) # plotting histograms f2 = Figure() @@ -243,16 +241,16 @@ function main() hist!(f2[1, 3], solhist[3, :], bins = 50, normalization = :pdf, color = (:orange, 0.5)) # save - save("l63_pdf.png", f2, px_per_unit = 3) - save("l63_pdf.pdf", f2, pt_per_unit = 3) + save(joinpath(output_directory, case * "_l63_pdf.png"), f2, px_per_unit = 3) + save(joinpath(output_directory, case * "_l63_pdf.pdf"), f2, pt_per_unit = 3) end end # save data - JLD2.save("l63_trainerr.jld2", "train_err", train_err) - JLD2.save("l63_histdata.jld2", "solhist", solhist, "uhist", u_hist) - JLD2.save("l63_testdata.jld2", "solplot", solplot, "uplot", u_test) + JLD2.save(joinpath(output_directory, case * "_l63_trainerr.jld2"), "train_err", train_err) + JLD2.save(joinpath(output_directory, case * "_l63_histdata.jld2"), "solhist", solhist, "uhist", u_hist) + JLD2.save(joinpath(output_directory, case * "_l63_testdata.jld2"), "solplot", solplot, "uplot", u_test) # compare marginal histograms to truth - rough measure of fit sol_cdf = sort(solhist, dims = 2) @@ -283,8 +281,8 @@ function main() # save - save("l63_cdfs.png", f4, px_per_unit = 3) - save("l63_cdfs.pdf", f4, pt_per_unit = 3) + save(joinpath(output_directory, case * "_l63_cdfs.png"), f4, px_per_unit = 3) + save(joinpath(output_directory, case * "_l63_cdfs.pdf"), f4, pt_per_unit = 3) end diff --git a/examples/Emulator/RandomFeature/scalar_optimize_and_plot_RF.jl b/examples/Emulator/RandomFeature/scalar_optimize_and_plot_RF.jl deleted file mode 100644 index 3ac0a64a..00000000 --- a/examples/Emulator/RandomFeature/scalar_optimize_and_plot_RF.jl +++ /dev/null @@ -1,280 +0,0 @@ -# Reference the in-tree version of CalibrateEmulateSample on Julias load path -include(joinpath(@__DIR__, "..", "..", "ci", "linkfig.jl")) - -# Import modules -using Random -using StableRNGs -using Distributions -using Statistics -using LinearAlgebra -using CalibrateEmulateSample.Emulators -using CalibrateEmulateSample.DataContainers -using CalibrateEmulateSample.ParameterDistributions -using CalibrateEmulateSample.EnsembleKalmanProcesses - - -case = "scalar" -println("running case $case") -kernel_structure = SeparableKernel(LowRankFactor(2, 1e-8), OneDimFactor()) # input and output(1D) cov structure in sep kernel -plot_flag = true -if plot_flag - ENV["GKSwstype"] = "100" - using Plots - gr(size = (1500, 700)) - # Plots.scalefontsizes(1.3) # scales on recursive calls to include.. 
- font = Plots.font("Helvetica", 18) - fontdict = Dict(:guidefont => font, :xtickfont => font, :ytickfont => font, :legendfont => font) - -end - -function meshgrid(vx::AbstractVector{T}, vy::AbstractVector{T}) where {T} - m, n = length(vy), length(vx) - gx = reshape(repeat(vx, inner = m, outer = 1), m, n) - gy = reshape(repeat(vy, inner = 1, outer = n), m, n) - - return gx, gy -end - -function main() - - rng_seed = 41 - Random.seed!(rng_seed) - output_directory = joinpath(@__DIR__, "output") - if !isdir(output_directory) - mkdir(output_directory) - end - - - - #problem - n = 150 # number of training points - p = 2 # input dim - d = 2 # output dim - X = 2.0 * π * rand(p, n) - # G(x1, x2) - g1x = sin.(X[1, :]) .+ cos.(X[2, :]) - g2x = sin.(X[1, :]) .- cos.(X[2, :]) - gx = zeros(2, n) - gx[1, :] = g1x - gx[2, :] = g2x - - # Add noise η - μ = zeros(d) - Σ = 0.1 * [[0.8, 0.1] [0.1, 0.5]] # d x d - noise_samples = rand(MvNormal(μ, Σ), n) - # y = G(x) + η - Y = gx .+ noise_samples - - iopairs = PairedDataContainer(X, Y, data_are_columns = true) - @assert get_inputs(iopairs) == X - @assert get_outputs(iopairs) == Y - - #plot training data with and without noise - if plot_flag - p1 = plot( - X[1, :], - X[2, :], - g1x, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - - figpath = joinpath(output_directory, "RF_observed_y1nonoise.png") - savefig(figpath) - - p2 = plot( - X[1, :], - X[2, :], - g2x, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "RF_observed_y2nonoise.png") - savefig(figpath) - - p3 = plot( - X[1, :], - X[2, :], - Y[1, :], - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "RF_observed_y1.png") - savefig(figpath) - - p4 = plot( - X[1, :], - X[2, :], - Y[2, :], - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - figpath = joinpath(output_directory, "RF_observed_y2.png") - savefig(figpath) - - end - - # setup random features - n_features = 400 - optimizer_options = Dict( - "n_iteration" => 20, - "n_ensemble" => 20, - "verbose" => true, - "scheduler" => DataMisfitController(terminate_at = 100.0), - ) #"Max" iterations (may do less) - - - srfi = ScalarRandomFeatureInterface( - n_features, - p, - kernel_structure = kernel_structure, - optimizer_options = optimizer_options, - ) - emulator = Emulator(srfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true) - println("build RF with $n training points and $(n_features) random features.") - - optimize_hyperparameters!(emulator) # although RF already optimized - - # Plot mean and variance of the predicted observables y1 and y2 - # For this, we generate test points on a x1-x2 grid. 
- n_pts = 200 - x1 = range(0.0, stop = 4.0 / 5.0 * 2 * π, length = n_pts) - x2 = range(0.0, stop = 4.0 / 5.0 * 2 * π, length = n_pts) - X1, X2 = meshgrid(x1, x2) - # Input for predict has to be of size N_samples x input_dim - inputs = permutedims(hcat(X1[:], X2[:]), (2, 1)) - - rf_mean, rf_cov = predict(emulator, inputs, transform_to_real = true) - println("end predictions at ", n_pts * n_pts, " points") - - - #plot predictions - for y_i in 1:d - rf_var_temp = [diag(rf_cov[j]) for j in 1:length(rf_cov)] # (40000,) - rf_var = permutedims(reduce(vcat, [x' for x in rf_var_temp]), (2, 1)) # 2 x 40000 - - mean_grid = reshape(rf_mean[y_i, :], n_pts, n_pts) # 2 x 40000 - if plot_flag - p5 = plot( - x1, - x2, - mean_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "mean of y" * string(y_i), - zguidefontrotation = 90, - ) - end - var_grid = reshape(rf_var[y_i, :], n_pts, n_pts) - if plot_flag - p6 = plot( - x1, - x2, - var_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "var of y" * string(y_i), - zguidefontrotation = 90, - ) - - plot(p5, p6, layout = (1, 2), legend = false) - - savefig(joinpath(output_directory, "RF_" * case * "_y" * string(y_i) * "_predictions.png")) - end - end - - # Plot the true components of G(x1, x2) - g1_true = sin.(inputs[1, :]) .+ cos.(inputs[2, :]) - g1_true_grid = reshape(g1_true, n_pts, n_pts) - if plot_flag - p7 = plot( - x1, - x2, - g1_true_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "sin(x1) + cos(x2)", - zguidefontrotation = 90, - ) - savefig(joinpath(output_directory, "RF_" * case * "_true_g1.png")) - end - - g2_true = sin.(inputs[1, :]) .- cos.(inputs[2, :]) - g2_true_grid = reshape(g2_true, n_pts, n_pts) - if plot_flag - p8 = plot( - x1, - x2, - g2_true_grid, - st = :surface, - camera = (30, 60), - c = :cividis, - xlabel = "x1", - ylabel = "x2", - zlabel = "sin(x1) - cos(x2)", - zguidefontrotation = 90, - ) - g_true_grids = [g1_true_grid, g2_true_grid] - - savefig(joinpath(output_directory, "RF_" * case * "_true_g2.png")) - - end - - # Plot the difference between the truth and the mean of the predictions - for y_i in 1:d - - # Reshape rf_cov to size N_samples x output_dim - rf_var_temp = [diag(rf_cov[j]) for j in 1:length(rf_cov)] # (40000,) - rf_var = permutedims(vcat([x' for x in rf_var_temp]...), (2, 1)) # 40000 x 2 - - mean_grid = reshape(rf_mean[y_i, :], n_pts, n_pts) - var_grid = reshape(rf_var[y_i, :], n_pts, n_pts) - # Compute and plot 1/variance * (truth - prediction)^2 - - if plot_flag - zlabel = "1/var * (true_y" * string(y_i) * " - predicted_y" * string(y_i) * ")^2" - - p9 = plot( - x1, - x2, - sqrt.(1.0 ./ var_grid .* (g_true_grids[y_i] .- mean_grid) .^ 2), - st = :surface, - camera = (30, 60), - c = :magma, - zlabel = zlabel, - xlabel = "x1", - ylabel = "x2", - zguidefontrotation = 90, - ) - - savefig(joinpath(output_directory, "RF_" * case * "_y" * string(y_i) * "_difference_truth_prediction.png")) - end - end -end - -main() diff --git a/examples/Emulator/RandomFeature/Project.toml b/examples/Emulator/Regression_2d_2d/Project.toml similarity index 100% rename from examples/Emulator/RandomFeature/Project.toml rename to examples/Emulator/Regression_2d_2d/Project.toml diff --git a/examples/Emulator/RandomFeature/vector_optimize_and_plot_RF.jl b/examples/Emulator/Regression_2d_2d/compare_regresssion.jl similarity index 70% rename from 
examples/Emulator/RandomFeature/vector_optimize_and_plot_RF.jl rename to examples/Emulator/Regression_2d_2d/compare_regresssion.jl index 6d134c98..be75e295 100644 --- a/examples/Emulator/RandomFeature/vector_optimize_and_plot_RF.jl +++ b/examples/Emulator/Regression_2d_2d/compare_regresssion.jl @@ -9,14 +9,12 @@ using Statistics using LinearAlgebra using CalibrateEmulateSample.Emulators using CalibrateEmulateSample.DataContainers -using CalibrateEmulateSample.ParameterDistributions using CalibrateEmulateSample.EnsembleKalmanProcesses plot_flag = true if plot_flag ENV["GKSwstype"] = "100" using Plots gr(size = (1500, 700)) - # Plots.scalefontsizes(1.3) font = Plots.font("Helvetica", 18) fontdict = Dict(:guidefont => font, :xtickfont => font, :ytickfont => font, :legendfont => font) @@ -40,9 +38,18 @@ function main() mkdir(output_directory) end - cases = ["svd-diag", "svd-nondiag", "nosvd-diag", "nosvd-nondiag", "svd-nonsep", "nosvd-nonsep"] - case_mask = 1:6 # (KEEP set to 1:6 when pushing for buildkite) - nugget = 1e-12 + cases = [ + "gp-skljl", + "gp-gpjl", # Very slow prediction... + "rf-scalar", + "rf-svd-diag", + "rf-svd-nondiag", + "rf-nosvd-diag", + "rf-nosvd-nondiag", + "rf-svd-nonsep", + "rf-nosvd-nonsep", + ] + case_mask = [1, 3:length(cases)...] # (KEEP set to 1:length(cases) when pushing for buildkite) #problem n = 150 # number of training points @@ -81,7 +88,7 @@ function main() zguidefontrotation = 90, ) - figpath = joinpath(output_directory, "RF_observed_y1nonoise.png") + figpath = joinpath(output_directory, "observed_y1nonoise.png") savefig(figpath) p2 = plot( @@ -95,7 +102,7 @@ function main() ylabel = "x2", zguidefontrotation = 90, ) - figpath = joinpath(output_directory, "RF_observed_y2nonoise.png") + figpath = joinpath(output_directory, "observed_y2nonoise.png") savefig(figpath) p3 = plot( @@ -109,7 +116,7 @@ function main() ylabel = "x2", zguidefontrotation = 90, ) - figpath = joinpath(output_directory, "RF_observed_y1.png") + figpath = joinpath(output_directory, "observed_y1.png") savefig(figpath) p4 = plot( @@ -123,72 +130,96 @@ function main() ylabel = "x2", zguidefontrotation = 90, ) - figpath = joinpath(output_directory, "RF_observed_y2.png") + figpath = joinpath(output_directory, "observed_y2.png") savefig(figpath) end for case in cases[case_mask] - println("running case $case") - # setup random features - n_features = 400 - optimizer_options = - Dict("n_iteration" => 5, "verbose" => true, "scheduler" => DataMisfitController(on_terminate = "continue")) #"Max" iterations (may do less) + println(" ") + @info "running case $case" - if case == "svd-diag" + # common Gaussian feature setup + pred_type = YType() + + # common random feature setup + n_features = 150 + optimizer_options = Dict("n_iteration" => 10, "scheduler" => DataMisfitController(on_terminate = "continue")) + nugget = 1e-12 + + + + if case == "gp-skljl" + gppackage = SKLJL() + gaussian_process = GaussianProcess(gppackage, noise_learn = true) + emulator = Emulator(gaussian_process, iopairs, obs_noise_cov = Σ, normalize_inputs = true) + elseif case == "gp-gpjl" + @warn "gp-gpjl case is very slow at prediction" + gppackage = GPJL() + gaussian_process = GaussianProcess(gppackage, noise_learn = true) + emulator = Emulator(gaussian_process, iopairs, obs_noise_cov = Σ, normalize_inputs = true) + elseif case == "rf-scalar" + srfi = ScalarRandomFeatureInterface( + n_features, + p, + kernel_structure = SeparableKernel(LowRankFactor(2, 1e-8), OneDimFactor()), + optimizer_options = optimizer_options, + ) + emulator 
= Emulator(srfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true) + elseif case == "rf-svd-diag" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = SeparableKernel(LowRankFactor(2, nugget), DiagonalFactor()), - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true) - elseif case == "svd-nondiag" + elseif case == "rf-svd-nondiag" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = SeparableKernel(LowRankFactor(2, nugget), LowRankFactor(2, nugget)), - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true) - elseif case == "svd-nonsep" + elseif case == "rf-svd-nonsep" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = NonseparableKernel(LowRankFactor(4, nugget)), - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true) - elseif case == "nosvd-diag" + elseif case == "rf-nosvd-diag" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = SeparableKernel(LowRankFactor(2, nugget), DiagonalFactor()), # roughly normalize output by noise - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true, decorrelate = false) - elseif case == "nosvd-nondiag" + elseif case == "rf-nosvd-nondiag" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = SeparableKernel(LowRankFactor(2, nugget), LowRankFactor(2, nugget)), # roughly normalize output by noise - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true, decorrelate = false) - elseif case == "nosvd-nonsep" + elseif case == "rf-nosvd-nonsep" vrfi = VectorRandomFeatureInterface( n_features, p, d, kernel_structure = NonseparableKernel(LowRankFactor(4, nugget)), - optimizer_options = optimizer_options, + optimizer_options = deepcopy(optimizer_options), ) emulator = Emulator(vrfi, iopairs, obs_noise_cov = Σ, normalize_inputs = true, decorrelate = false) @@ -200,23 +231,23 @@ function main() # Plot mean and variance of the predicted observables y1 and y2 # For this, we generate test points on a x1-x2 grid. 
n_pts = 200 - x1 = range(0.0, stop = 4.0 / 5.0 * 2 * π, length = n_pts) - x2 = range(0.0, stop = 4.0 / 5.0 * 2 * π, length = n_pts) + x1 = range(0.0, stop = 2 * π, length = n_pts) + x2 = range(0.0, stop = 2 * π, length = n_pts) X1, X2 = meshgrid(x1, x2) # Input for predict has to be of size input_dim x N_samples inputs = permutedims(hcat(X1[:], X2[:]), (2, 1)) - rf_mean, rf_cov = predict(emulator, inputs, transform_to_real = true) + em_mean, em_cov = predict(emulator, inputs, transform_to_real = true) println("end predictions at ", n_pts * n_pts, " points") #plot predictions for y_i in 1:d - rf_var_temp = [diag(rf_cov[j]) for j in 1:length(rf_cov)] # (40000,) - rf_var = permutedims(reduce(vcat, [x' for x in rf_var_temp]), (2, 1)) # 2 x 40000 + em_var_temp = [diag(em_cov[j]) for j in 1:length(em_cov)] # (40000,) + em_var = permutedims(reduce(vcat, [x' for x in em_var_temp]), (2, 1)) # 2 x 40000 - mean_grid = reshape(rf_mean[y_i, :], n_pts, n_pts) # 2 x 40000 + mean_grid = reshape(em_mean[y_i, :], n_pts, n_pts) # 2 x 40000 if plot_flag p5 = plot( x1, @@ -231,7 +262,7 @@ function main() zguidefontrotation = 90, ) end - var_grid = reshape(rf_var[y_i, :], n_pts, n_pts) + var_grid = reshape(em_var[y_i, :], n_pts, n_pts) if plot_flag p6 = plot( x1, @@ -248,7 +279,7 @@ function main() plot(p5, p6, layout = (1, 2), legend = false) - savefig(joinpath(output_directory, "RF_" * case * "_y" * string(y_i) * "_predictions.png")) + savefig(joinpath(output_directory, case * "_y" * string(y_i) * "_predictions.png")) end end @@ -268,7 +299,7 @@ function main() zlabel = "sin(x1) + cos(x2)", zguidefontrotation = 90, ) - savefig(joinpath(output_directory, "RF_" * case * "_true_g1.png")) + savefig(joinpath(output_directory, case * "_true_g1.png")) end g2_true = sin.(inputs[1, :]) .- cos.(inputs[2, :]) @@ -288,21 +319,21 @@ function main() ) g_true_grids = [g1_true_grid, g2_true_grid] - savefig(joinpath(output_directory, "RF_" * case * "_true_g2.png")) + savefig(joinpath(output_directory, case * "_true_g2.png")) end - MSE = 1 / size(rf_mean, 2) * sqrt(sum((rf_mean[1, :] - g1_true) .^ 2 + (rf_mean[2, :] - g2_true) .^ 2)) + MSE = 1 / size(em_mean, 2) * sqrt(sum((em_mean[1, :] - g1_true) .^ 2 + (em_mean[2, :] - g2_true) .^ 2)) println("L^2 error of mean and latent truth:", MSE) # Plot the difference between the truth and the mean of the predictions for y_i in 1:d - # Reshape rf_cov to size N_samples x output_dim - rf_var_temp = [diag(rf_cov[j]) for j in 1:length(rf_cov)] # (40000,) - rf_var = permutedims(vcat([x' for x in rf_var_temp]...), (2, 1)) # 40000 x 2 + # Reshape em_cov to size N_samples x output_dim + em_var_temp = [diag(em_cov[j]) for j in 1:length(em_cov)] # (40000,) + em_var = permutedims(vcat([x' for x in em_var_temp]...), (2, 1)) # 40000 x 2 - mean_grid = reshape(rf_mean[y_i, :], n_pts, n_pts) - var_grid = reshape(rf_var[y_i, :], n_pts, n_pts) + mean_grid = reshape(em_mean[y_i, :], n_pts, n_pts) + var_grid = reshape(em_var[y_i, :], n_pts, n_pts) # Compute and plot 1/variance * (truth - prediction)^2 if plot_flag @@ -321,9 +352,7 @@ function main() zguidefontrotation = 90, ) - savefig( - joinpath(output_directory, "RF_" * case * "_y" * string(y_i) * "_difference_truth_prediction.png"), - ) + savefig(joinpath(output_directory, case * "_y" * string(y_i) * "_difference_truth_prediction.png")) end end end