Skip to content

Commit

Permalink
make exp a little harder..
Browse files Browse the repository at this point in the history
  • Loading branch information
odunbar committed Aug 20, 2024
1 parent 6f76458 commit a111a8a
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 36 deletions.
8 changes: 4 additions & 4 deletions examples/Darcy/calibrate.jl
Original file line number Diff line number Diff line change
Expand Up @@ -49,13 +49,13 @@ function main()
dim = 2
N, L = 80, 1.0
pts_per_dim = LinRange(0, L, N)
obs_ΔN = 10
obs_ΔN = 20

# To provide a simple test case, we assume that the true function parameter is a particular sample from the function space we set up to define our prior. More precisely we choose a value of the truth that doesn't have a vanishingly small probability under the prior defined by a probability distribution over functions; here taken as a family of Gaussian Random Fields (GRF). The function distribution is characterized by a covariance function - here a Matern kernel which assumes a level of smoothness over the samples from the distribution. We define an appropriate expansion of this distribution, here based on the Karhunen-Loeve expansion (similar to an eigenvalue-eigenfunction expansion) that is truncated to a finite number of terms, known as the degrees of freedom (`dofs`). The `dofs` define the effective dimension of the learning problem, decoupled from the spatial discretization. Explicitly, larger `dofs` may be required to represent multiscale functions, but come at an increased dimension of the parameter space and therefore a typical increase in cost and difficulty of the learning problem.

smoothness = 1.0
corr_length = 0.25
dofs = 5
dofs = 6

grf = GRF.GaussianRandomField(
GRF.CovarianceFunction(dim, GRF.Matern(smoothness, corr_length)),
Expand Down Expand Up @@ -84,7 +84,7 @@ function main()
println(" Number of observation points: $(darcy.N_y)")
h_2d_true = solve_Darcy_2D(darcy, κ_true)
y_noiseless = compute_obs(darcy, h_2d_true)
obs_noise_cov = 0.05^2 * I(length(y_noiseless)) * (maximum(y_noiseless) - minimum(y_noiseless))
obs_noise_cov = 0.2^2 * I(length(y_noiseless)) * (maximum(y_noiseless) - minimum(y_noiseless))
truth_sample = vec(y_noiseless + rand(rng, MvNormal(zeros(length(y_noiseless)), obs_noise_cov)))


Expand All @@ -93,7 +93,7 @@ function main()


# We define some algorithm parameters, here we take ensemble members larger than the dimension of the parameter space
N_ens = 50 # number of ensemble members
N_ens = 30 # number of ensemble members
N_iter = 5 # number of EKI iterations

# We sample the initial ensemble from the prior, and create the EKP object as an EKI algorithm using the `Inversion()` keyword
Expand Down
32 changes: 0 additions & 32 deletions examples/Darcy/emulate_sample.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ function main()

cases = [
"GP", # diagonalize, train scalar GP, assume diag inputs
"RF-vector-svd-nonsep",
]

#### CHOOSE YOUR CASE:
Expand All @@ -33,25 +32,6 @@ function main()
println("case: ", case)
min_iter = 1
max_iter = 5 # number of EKP iterations to use data from is at most this
overrides = Dict(
"verbose" => true,
"scheduler" => DataMisfitController(terminate_at = 100.0),
"cov_sample_multiplier" => 0.5,
"n_iteration" => 20,
"n_ensemble" => 120,
"localization" => Localizers.SECNice(1000, 0.1, 1.0),
)
# we do not want termination, as our priors have relatively little interpretation

# Should be loaded:
dim = 2
N, L = 80, 1.0
pts_per_dim = LinRange(0, L, N)
obs_ΔN = 10
smoothness = 1.0
corr_length = 0.25
dofs = 20 # i.e. the input space dimension
####

exp_name = "darcy"
rng_seed = 940284
Expand Down Expand Up @@ -93,18 +73,6 @@ function main()
# gppackage = Emulators.SKLJL()
gppackage = Emulators.GPJL()
mlt = GaussianProcess(gppackage; noise_learn = false)
elseif case ∈ ["RF-vector-svd-nonsep"]
kernel_structure = NonseparableKernel(LowRankFactor(1, nugget))
n_features = 100

mlt = VectorRandomFeatureInterface(
n_features,
n_params,
output_dim,
rng = rng,
kernel_structure = kernel_structure,
optimizer_options = overrides,
)
end


Expand Down

0 comments on commit a111a8a

Please sign in to comment.