@@ -264,66 +264,50 @@ function fit!(model::DynamicFactorModel;
 end
 
 """
-    model_tuning_ic!(model, regularizers; ic = :bic, parallel = false, verbose = false,
+    model_tuning_ic!(model, space, regularizer; trials = 100, ic = :bic, verbose = false,
-        kwargs...) -> (model_opt, index_opt)
+        kwargs...) -> (model_opt, best)
 
-Search for the optimal regularizer in `regularizers` for the dynamic factor model `model`
-using information criterion `ic`. If `parallel` is true, the search is performed in
-parallel. If `verbose` is true, a summary of model tuning and progress of the search is
-printed. Additional keyword arguments `kwargs` are passed to the `fit!` function.
+Search for the optimal regularizer in search space `space` for the dynamic factor model
+`model` using information criterion `ic` and a tree-structured Parzen estimator that
+evaluates `trials` candidate hyperparameter configurations, where `regularizer` is a
+function that constructs a regularizer from a dictionary of hyperparameters. If `verbose`
+is true, a summary of model tuning and progress of the search is printed. Additional
+keyword arguments `kwargs` are passed to the `fit!` function.
 """
-function model_tuning_ic!(model::DynamicFactorModel, regularizers::AbstractArray;
-    ic::Symbol = :bic, parallel::Bool = false, verbose::Bool = false,
+function model_tuning_ic!(model::DynamicFactorModel, space::Dict, regularizer::Function;
+    trials::Integer = 100, ic::Symbol = :bic, verbose::Bool = false,
     kwargs...)
     ic ∉ (:aic, :aicc, :bic) && error("Information criterion $ic not supported.")
 
     if verbose
         println("Model tuning summary")
         println("====================")
-        println("Number of regularizers: $(length(regularizers))")
+        println("Number of trials: $trials")
         println("Information criterion: $ic")
-        println("Parallel: $(parallel ? "yes" : "no")")
         println("====================")
     end
 
-    # model tuning
-    map_func = parallel ? verbose ? progress_pmap : pmap : verbose ? progress_map : map
-    θ0 = params(model)
-    f0 = copy(factors(model))
-    θ = map_func(regularizers) do regularizer
-        try
-            params!(model, θ0)
-            factors(model) .= f0
-            fit!(model, regularizer = regularizer; kwargs...)
-            params(model)
-        catch
-            missing
-        end
-    end
-    ic_values = map(θ) do θi
-        if all(ismissing.(θi))
-            missing
-        else
-            params!(model, θi)
-            eval(ic)(model)
-        end
-    end
-    (ic_opt, index_opt) = findmin(x -> isnan(x) ? Inf : x, skipmissing(ic_values))
-    params!(model, θ[index_opt])
-    (α, _, _) = smoother(model)
-    for (t, αt) in pairs(α)
-        factors(model)[:, t] = αt
+    # objective function
+    function objective(params)
+        fit!(model, regularizer = regularizer(params); kwargs...)
+
+        return eval(ic)(model)
     end
 
+    # model tuning
+    best = fmin(objective, space, trials)
+
+    # refit with the optimal regularizer
+    fit!(model, regularizer = regularizer(best); kwargs...)
+
     if verbose
         println("====================")
-        println("Optimal regularizer index: $(index_opt)")
-        println("Optimal information criterion: $(ic_opt)")
-        println("Failed fits: $(sum(ismissing.(ic_values)))")
+        println("Optimal regularizer: $best")
+        println("Optimal information criterion: $(eval(ic)(model))")
         println("====================")
     end
 
-    return (model, index_opt)
+    return (model, best)
 end
 
 """
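
For orientation, a minimal sketch of how the revised interface might be called, assuming the `fmin` above comes from TreeParzen.jl (so the search space is built from its `HP` primitives) and using a hypothetical `MyPenalty(λ)` constructor in place of whatever regularizer types the package actually provides:

    using TreeParzen

    # illustrative search space over a single penalty strength λ
    space = Dict(:λ => HP.Uniform(:λ, 0.0, 10.0))

    # map a dictionary of sampled hyperparameters to a regularizer object
    # (MyPenalty is a hypothetical stand-in, not part of the package)
    regularizer(params) = MyPenalty(params[:λ])

    (model_opt, best) = model_tuning_ic!(model, space, regularizer, trials = 50, ic = :bic, verbose = true)

Here `best` is the dictionary of optimal hyperparameters returned by `fmin`, which `model_tuning_ic!` uses to refit the model before returning both.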