Commit 610d53a: "format"
odunbar committed Oct 5, 2023 (parent: 12a3785)
Showing 2 changed files with 66 additions and 71 deletions.

15 changes: 7 additions & 8 deletions examples/EDMF_data/plot_posterior.jl
@@ -1,5 +1,5 @@
# Import modules
ENV["GKSwstype"]="100"
ENV["GKSwstype"] = "100"
using Plots

using CairoMakie, PairPlots
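# Note: ENV["GKSwstype"] = "100" selects GR's offscreen workstation type, letting
# the Plots.jl GR backend render without a display (e.g. in CI); the change to
# this line in the commit is purely the spacing around `=`.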
@@ -13,8 +13,8 @@ using CalibrateEmulateSample.ParameterDistributions
# date = Date(year,month,day)

# 2-parameter calibration exp
exp_name = "ent-det-calibration"
date_of_run = Date(2023, 10, 5)
exp_name = "ent-det-calibration"
date_of_run = Date(2023, 10, 5)

# 5-parameter calibration exp
#exp_name = "ent-det-tked-tkee-stab-calibration"
@@ -43,11 +43,10 @@ density_filepath = joinpath(figure_save_directory, "posterior_dist_comp.png")
transformed_density_filepath = joinpath(figure_save_directory, "posterior_dist_phys.png")
labels = get_name(posterior)

data = (; [(Symbol(labels[i]), posterior_samples[i,:]) for i in 1:length(labels)]...)
transformed_data = (; [(Symbol(labels[i]), transformed_posterior_samples[i,:]) for i in 1:length(labels)]...)
data = (; [(Symbol(labels[i]), posterior_samples[i, :]) for i in 1:length(labels)]...)
transformed_data = (; [(Symbol(labels[i]), transformed_posterior_samples[i, :]) for i in 1:length(labels)]...)

p = pairplot(data => (PairPlots.Scatter(), ))
trans_p = pairplot(transformed_data => (PairPlots.Scatter(), ))
p = pairplot(data => (PairPlots.Scatter(),))
trans_p = pairplot(transformed_data => (PairPlots.Scatter(),))
save(density_filepath, p)
save(transformed_density_filepath, trans_p)
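# A self-contained sketch of the NamedTuple-splatting idiom used above: pairs of
# (Symbol, vector) are splatted into `(; ...)` to build a table programmatically,
# which PairPlots accepts directly. The labels and samples here are illustrative.
using CairoMakie, PairPlots

labels = ["entrainment_factor", "detrainment_factor"]   # example parameter names
samples = randn(2, 1000)                                # stand-in for posterior draws (params x samples)

data = (; [(Symbol(labels[i]), samples[i, :]) for i in 1:length(labels)]...)
fig = pairplot(data => (PairPlots.Scatter(),))          # corner plot with scatter panels
save("corner_sketch.png", fig)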

122 changes: 59 additions & 63 deletions examples/EDMF_data/uq_for_edmf.jl
@@ -4,7 +4,7 @@ PLOT_FLAG = false
# Import modules
using Distributions # probability distributions and associated functions
using LinearAlgebra
ENV["GKSwstype"]="100"
ENV["GKSwstype"] = "100"
using Plots
using Random
using JLD2
@@ -138,52 +138,46 @@ function main()
end
=#
# build prior if jld2 does not work
function get_prior_config(s::SS) where {SS <: AbstractString}
config = Dict()
if s == "ent-det-calibration"
config["constraints"] = Dict(
"entrainment_factor" => [bounded(0.0, 1.0)],
"detrainment_factor" => [bounded(0.0, 1.0)],
)
config["prior_mean"] = Dict(
"entrainment_factor" => 0.13,
"detrainment_factor" => 0.51,
)
config["unconstrained_σ"] = 1.0
elseif s == "ent-det-tked-tkee-stab-calibration"
config["constraints"] = Dict(
"entrainment_factor" => [bounded(0.0, 1.0)],
"detrainment_factor" => [bounded(0.0, 1.0)],
"tke_ed_coeff" => [bounded(0.01, 1.0)],
"tke_diss_coeff" => [bounded(0.01, 1.0)],
"static_stab_coeff" => [bounded(0.01, 1.0)],
)
config["prior_mean"] = Dict(
"entrainment_factor" => 0.13,
"detrainment_factor" => 0.51,
"tke_ed_coeff" => 0.14,
"tke_diss_coeff" => 0.22,
"static_stab_coeff" => 0.4,
)
config["unconstrained_σ"] = 1.0
else
throw(ArgumentError("prior for experiment $s not found, please implement in uq_for_edmf.jl"))
end
function get_prior_config(s::SS) where {SS <: AbstractString}
config = Dict()
if s == "ent-det-calibration"
config["constraints"] =
Dict("entrainment_factor" => [bounded(0.0, 1.0)], "detrainment_factor" => [bounded(0.0, 1.0)])
config["prior_mean"] = Dict("entrainment_factor" => 0.13, "detrainment_factor" => 0.51)
config["unconstrained_σ"] = 1.0
elseif s == "ent-det-tked-tkee-stab-calibration"
config["constraints"] = Dict(
"entrainment_factor" => [bounded(0.0, 1.0)],
"detrainment_factor" => [bounded(0.0, 1.0)],
"tke_ed_coeff" => [bounded(0.01, 1.0)],
"tke_diss_coeff" => [bounded(0.01, 1.0)],
"static_stab_coeff" => [bounded(0.01, 1.0)],
)
config["prior_mean"] = Dict(
"entrainment_factor" => 0.13,
"detrainment_factor" => 0.51,
"tke_ed_coeff" => 0.14,
"tke_diss_coeff" => 0.22,
"static_stab_coeff" => 0.4,
)
config["unconstrained_σ"] = 1.0
else
throw(ArgumentError("prior for experiment $s not found, please implement in uq_for_edmf.jl"))
end
return config
end
prior_config = get_prior_config(exp_name)
end
prior_config = get_prior_config(exp_name)
means = prior_config["prior_mean"]
std = prior_config["unconstrained_σ"]
constraints = prior_config["constraints"]


prior = combine_distributions([
ParameterDistribution(Dict(
"name" => name,
"distribution" => Parameterized(Normal(mean,std)),
"constraint" => constraints[name],
)) for (name, mean) in means ])


prior = combine_distributions([
ParameterDistribution(
Dict("name" => name, "distribution" => Parameterized(Normal(mean, std)), "constraint" => constraints[name]),
) for (name, mean) in means
])
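# The pattern above pairs an unconstrained Normal prior with a `bounded(lo, hi)`
# constraint. Conceptually, samples drawn in the unconstrained space are pushed
# through a logistic-type map into (lo, hi); the sketch below illustrates that
# idea and is not the exact transform ParameterDistributions implements.
using Distributions

lo, hi = 0.0, 1.0
u = rand(Normal(0.13, 1.0), 5)          # unconstrained draws (mean from the config)
c = lo .+ (hi - lo) ./ (1 .+ exp.(-u))  # mapped into the bounded interval (0, 1)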

# Option (ii) load EKP object
# max_ekp_it = 10 # use highest available iteration file
# ekp_filename = "ekpobj_iter_"*max_ekp_it*"10.jld2"
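# Caution: if uncommented, "ekpobj_iter_" * max_ekp_it * "10.jld2" would throw a
# MethodError, since `*` concatenates Strings but not a String and an Int.
# A sketch with interpolation instead; the object key "ekp" is a hypothetical
# name for whatever was stored in the file:
# using JLD2
# max_ekp_it = 10                                 # assumed highest saved iteration
# ekp_filename = "ekpobj_iter_$(max_ekp_it).jld2"
# ekp = load(ekp_filename, "ekp")                 # JLD2.load(path, key)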
@@ -210,22 +204,22 @@ ParameterDistribution(Dict(
"RF-vector-svd-nondiag",
"RF-vector-svd-nonsep",
]
case = cases[5]

overrides = Dict(
"verbose" => true,
"train_fraction" => 0.95,
"scheduler" => DataMisfitController(terminate_at = 100),
"cov_sample_multiplier" => 0.5,
"n_iteration" => 5,
# "n_ensemble" => 20,
# "localization" => SEC(0.1), # localization / sample error correction for small ensembles
)
nugget = 0.01
rng_seed = 99330
rng = Random.MersenneTwister(rng_seed)
input_dim = size(get_inputs(input_output_pairs),1)
output_dim = size(get_outputs(input_output_pairs),1)
case = cases[5]

overrides = Dict(
"verbose" => true,
"train_fraction" => 0.95,
"scheduler" => DataMisfitController(terminate_at = 100),
"cov_sample_multiplier" => 0.5,
"n_iteration" => 5,
# "n_ensemble" => 20,
# "localization" => SEC(0.1), # localization / sample error correction for small ensembles
)
nugget = 0.01
rng_seed = 99330
rng = Random.MersenneTwister(rng_seed)
input_dim = size(get_inputs(input_output_pairs), 1)
output_dim = size(get_outputs(input_output_pairs), 1)
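# The containers store samples column-wise (one column per sample), so the
# `size(..., 1)` calls above read off the input and output dimensions. A sketch,
# assuming EKP's PairedDataContainer API, with illustrative shapes:
# using EnsembleKalmanProcesses.DataContainers
# inputs  = randn(2, 50)    # 2 parameters, 50 samples
# outputs = randn(6, 50)    # 6 observed quantities, 50 samples
# pairs = PairedDataContainer(inputs, outputs, data_are_columns = true)
# size(get_inputs(pairs), 1)   # == 2 (input_dim)
# size(get_outputs(pairs), 1)  # == 6 (output_dim)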
if case == "GP"

gppackage = Emulators.SKLJL()
@@ -238,7 +232,7 @@ output_dim = size(get_outputs(input_output_pairs),1)
)
elseif case ["RF-scalar"]
n_features = 100
kernel_structure = SeparableKernel(CholeskyFactor(nugget),OneDimFactor())
kernel_structure = SeparableKernel(CholeskyFactor(nugget), OneDimFactor())
mlt = ScalarRandomFeatureInterface(
n_features,
input_dim,
@@ -248,9 +242,11 @@ output_dim = size(get_outputs(input_output_pairs),1)
)
elseif case ["RF-vector-svd-diag", "RF-vector-svd-nondiag"]
# do we want to assume that the outputs are decorrelated in the machine-learning problem?
kernel_structure = case ["RF-vector-svd-diag"] ? SeparableKernel(LowRankFactor(1,nugget), DiagonalFactor(nugget)) : SeparableKernel(LowRankFactor(2,nugget),LowRankFactor(2, nugget))
kernel_structure =
case ["RF-vector-svd-diag"] ? SeparableKernel(LowRankFactor(1, nugget), DiagonalFactor(nugget)) :
SeparableKernel(LowRankFactor(2, nugget), LowRankFactor(2, nugget))
n_features = 500

mlt = VectorRandomFeatureInterface(
n_features,
input_dim,
@@ -262,7 +258,7 @@ output_dim = size(get_outputs(input_output_pairs),1)
elseif case ["RF-vector-svd-nonsep"]
kernel_structure = NonseparableKernel(LowRankFactor(3, nugget))
n_features = 500

mlt = VectorRandomFeatureInterface(
n_features,
input_dim,
@@ -271,7 +267,7 @@ output_dim = size(get_outputs(input_output_pairs),1)
kernel_structure = kernel_structure,
optimizer_options = overrides,
)
end
end

# Fit an emulator to the data
normalized = true
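# The diff is truncated here ("Fit an emulator to the data"). In the
# CalibrateEmulateSample examples, the step that typically follows wraps the
# chosen machine-learning tool in an Emulator and trains it; a sketch, where
# the noise covariance name `truth_cov` is an assumption:
# emulator = Emulator(mlt, input_output_pairs; obs_noise_cov = truth_cov, normalize_inputs = normalized)
# optimize_hyperparameters!(emulator)   # fit GP / random-feature hyperparameters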
