This crate provides a hyperparameter optimization algorithm using TPE (Tree-structured Parzen Estimator).
An example optimizing a simple quadratic function that has one numerical and one categorical parameter:
```rust
use rand::SeedableRng as _;

fn objective(x: f64, y: i32) -> f64 {
    x.powi(2) + y as f64
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let choices = [1, 10, 100];

    // One optimizer per parameter: a numerical one over [-5.0, 5.0] and a
    // categorical one over the indices of `choices`.
    let mut optim0 =
        tpe::TpeOptimizer::new(tpe::parzen_estimator(), tpe::range(-5.0, 5.0)?);
    let mut optim1 =
        tpe::TpeOptimizer::new(tpe::histogram_estimator(), tpe::categorical_range(choices.len())?);

    let mut best_value = f64::INFINITY;
    let mut rng = rand::rngs::StdRng::from_seed(Default::default());
    for _ in 0..100 {
        // Ask for the next parameter values, evaluate the objective,
        // then report the result back to both optimizers.
        let x = optim0.ask(&mut rng)?;
        let y = optim1.ask(&mut rng)?;
        let v = objective(x, choices[y as usize]);
        optim0.tell(x, v)?;
        optim1.tell(y, v)?;
        best_value = best_value.min(v);
    }

    assert_eq!(best_value, 1.000098470725203);
    Ok(())
}
```
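The same ask/tell pattern extends to any number of parameters: each parameter gets its own `TpeOptimizer`, and all of them are told the same objective value. The following is a minimal, self-contained sketch along those lines; the objective function and search ranges are illustrative and not part of the crate.

```rust
use rand::SeedableRng as _;

// Illustrative objective with two numerical parameters.
fn objective(x: f64, y: f64) -> f64 {
    (x - 1.0).powi(2) + (y + 2.0).powi(2)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // One optimizer per parameter; the shared objective value is what couples them.
    let mut optim_x = tpe::TpeOptimizer::new(tpe::parzen_estimator(), tpe::range(-5.0, 5.0)?);
    let mut optim_y = tpe::TpeOptimizer::new(tpe::parzen_estimator(), tpe::range(-5.0, 5.0)?);

    let mut rng = rand::rngs::StdRng::from_seed(Default::default());
    let mut best_value = f64::INFINITY;
    for _ in 0..100 {
        let x = optim_x.ask(&mut rng)?;
        let y = optim_y.ask(&mut rng)?;
        let v = objective(x, y);
        optim_x.tell(x, v)?;
        optim_y.tell(y, v)?;
        best_value = best_value.min(v);
    }
    println!("best value: {}", best_value);
    Ok(())
}
```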
## kurobako benchmark
There is an example, `examples/tpe-solver.rs`, which implements the kurobako solver interface, so you can run a benchmark using TPE as follows:
```console
$ PROBLEMS=$(kurobako problem-suite sigopt auc)
$ SOLVERS="$(kurobako solver command -- cargo run --release --example tpe-solver) $(kurobako solver optuna)"
$ kurobako studies --solvers $SOLVERS --problems $PROBLEMS --repeats 30 --budget 80 | kurobako run > result.json
$ cat result.json | kurobako report > report.md
```
The result (`report.md`) of the above commands is shown here.
Please refer to the following papers for the details of TPE: