Commit 5eada45 ("tuner"), 1 parent: febd804

File tree: 5 files changed (+194, -0 lines)

DESCRIPTION

Lines changed: 1 addition & 0 deletions

@@ -103,6 +103,7 @@ Collate:
     'SurrogateLearnerAsync.R'
     'SurrogateLearnerCollection.R'
     'TunerAsyncMbo.R'
+    'TunerAsyncMboADBO.R'
     'TunerMbo.R'
     'mlr_loop_functions.R'
     'bayesopt_ego.R'

NAMESPACE

Lines changed: 1 addition & 0 deletions

@@ -30,6 +30,7 @@ export(SurrogateLearner)
 export(SurrogateLearnerAsync)
 export(SurrogateLearnerCollection)
 export(TunerAsyncMbo)
+export(TunerAsyncMboADBO)
 export(TunerMbo)
 export(acqf)
 export(acqfs)

R/TunerAsyncMboADBO.R

Lines changed: 63 additions & 0 deletions

@@ -0,0 +1,63 @@
+#' @title Asynchronous Decentralized Bayesian Optimization
+#' @name mlr_tuners_adbo
+#'
+#' @description
+#' The `TunerAsyncMboADBO` class implements Asynchronous Decentralized Bayesian Optimization (ADBO).
+#' ADBO is a variant of Asynchronous Model Based Optimization (AMBO) that uses [AcqFunctionStochasticCB] with exponential lambda decay.
+#'
+#' @note
+#' The lambda parameter of the upper confidence bound acquisition function controls the trade-off between exploration and exploitation.
+#' A large lambda value leads to more exploration, while a small lambda value leads to more exploitation.
+#' The initial lambda value is drawn from an exponential distribution with rate `1 / lambda`.
+#' ADBO can use periodic exponential decay to reduce lambda with the formula `lambda * exp(-rate * (t %% period))`.
+#' The surrogate model is always a random forest and the acquisition optimizer is random search with a budget of 10,000 evaluations.
+#'
+#' @section Parameters:
+#' \describe{
+#' \item{`lambda`}{`numeric(1)`\cr
+#'   Lambda value for sampling from the exponential distribution.}
+#' \item{`rate`}{`numeric(1)`\cr
+#'   Rate of the exponential decay.}
+#' \item{`period`}{`integer(1)`\cr
+#'   Period of the exponential decay.}
+#' \item{`initial_design_size`}{`integer(1)`\cr
+#'   Size of the initial design.
+#'   Defaults to `100`.}
+#'
+#' \item{`initial_design`}{`data.table::data.table()`\cr
+#'   Initial design of the optimization.
+#'   If `NULL`, a design of size `design_size` is generated with `design_function`.}
+#' \item{`design_size`}{`integer(1)`\cr
+#'   Size of the initial design.}
+#' \item{`design_function`}{`character(1)`\cr
+#'   Function to generate the initial design.
+#'   One of `c("random", "sobol", "lhs")`.}
+#' \item{`n_workers`}{`integer(1)`\cr
+#'   Number of parallel workers.
+#'   If `NULL`, all rush workers set with [rush::rush_plan()] are used.}
+#' }
+#'
+#'
+#' @references
+#' * `r format_bib("egele_2023")`
+#'
+#' @export
+TunerAsyncMboADBO = R6Class("TunerAsyncMboADBO",
+  inherit = mlr3tuning::TunerAsyncFromOptimizerAsync,
+  public = list(
+
+    #' @description
+    #' Creates a new instance of this [R6][R6::R6Class] class.
+    initialize = function() {
+      optimizer = OptimizerAsyncMboADBO$new()
+
+      super$initialize(
+        optimizer = optimizer,
+        man = "mlr3tuning::mlr_tuners_adbo"
+      )
+    }
+  )
+)
+
+#' @include aaa.R
+tuners[["adbo"]] = TunerAsyncMboADBO
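
The @note above describes a periodic exponential decay of lambda. Below is a minimal standalone R sketch of that schedule, not part of the commit; the concrete values for lambda, rate, and period are illustrative assumptions, as the defaults are not visible in this diff.

# Illustrative values; actual defaults are not shown in this diff.
lambda = 1.96   # rate parameter used for drawing the initial lambda
rate = 0.1      # decay rate
period = 25     # decay period in evaluations

set.seed(1)
lambda_0 = rexp(1, rate = 1 / lambda)   # initial lambda ~ Exp(1 / lambda)

t = 0:100                               # evaluation counter
lambda_t = lambda_0 * exp(-rate * (t %% period))

# lambda shrinks within each period and resets every `period` evaluations,
# shifting the acquisition function from exploration towards exploitation.
plot(t, lambda_t, type = "s", xlab = "t", ylab = "lambda")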

man/mlr_tuners_adbo.Rd

Lines changed: 100 additions & 0 deletions
(Generated documentation file; diff not rendered.)

(New test file; file name not shown in this view)

Lines changed: 29 additions & 0 deletions

@@ -0,0 +1,29 @@
+
+test_that("async mbo tuner works", {
+  skip_on_cran()
+  skip_if_not_installed("rush")
+  flush_redis()
+
+  learner = lrn("classif.rpart",
+    minsplit = to_tune(2, 128),
+    cp = to_tune(1e-04, 1e-1))
+
+  rush::rush_plan(n_workers = 4)
+  instance = ti_async(
+    task = tsk("pima"),
+    learner = learner,
+    resampling = rsmp("cv", folds = 3),
+    measure = msr("classif.ce"),
+    terminator = trm("evals", n_evals = 20),
+    store_benchmark_result = FALSE
+  )
+
+  tuner = tnr("adbo", design_size = 10)
+
+  expect_data_table(tuner$optimize(instance), nrows = 1)
+  expect_data_table(instance$archive$data, min.rows = 10)
+  expect_names(names(instance$archive$data), must.include = c("acq_cb", ".already_evaluated", "lambda_0", "lambda"))
+
+  expect_rush_reset(instance$rush)
+})
+
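
For context, a hedged usage sketch built from the test above. It assumes the package is mlr3mbo, that the documented `lambda`, `rate`, `period`, and `design_size` parameters can be passed through `tnr()`, and that a Redis instance is available for rush; treat it as a sketch rather than documented API.

library(mlr3tuning)
library(mlr3mbo)   # assumed package name for this commit

rush::rush_plan(n_workers = 2)

learner = lrn("classif.rpart",
  minsplit = to_tune(2, 128),
  cp = to_tune(1e-04, 1e-1))

instance = ti_async(
  task = tsk("pima"),
  learner = learner,
  resampling = rsmp("cv", folds = 3),
  measure = msr("classif.ce"),
  terminator = trm("evals", n_evals = 50)
)

# parameter names as documented in the @section Parameters block above;
# the numeric values are illustrative assumptions
tuner = tnr("adbo", design_size = 10, lambda = 1.96, rate = 0.1, period = 25)
tuner$optimize(instance)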
