diff --git a/Project.toml b/Project.toml
index abf405ce2..998d81a74 100644
--- a/Project.toml
+++ b/Project.toml
@@ -3,12 +3,11 @@ uuid = "429524aa-4258-5aef-a3af-852621145aeb"
 version = "1.14.0"
 
 [deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 EnumX = "4e289a0a-7415-4d19-859d-a7e5c4648b56"
 FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b"
-ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
-MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 NLSolversBase = "d41bc354-129a-5804-8e4c-c37616107c6c"
 NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
 PositiveFactorizations = "85a6dd25-e78a-55b7-8502-1745935b8125"
@@ -50,17 +49,14 @@ Test = "<0.0.1, 1.6"
 julia = "1.10"
 
 [extras]
-ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
 Distributions = "31c24e10-a181-5473-b8eb-7969acd0382f"
 ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
+ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
 JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
-LineSearches = "d3d80556-e9d4-5f37-9878-2ab0fcc64255"
 MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
 Measurements = "eff96d63-e80a-5855-80a2-b1b0885c5ab7"
-NLSolversBase = "d41bc354-129a-5804-8e4c-c37616107c6c"
 OptimTestProblems = "cec144fc-5a64-5bc6-99fb-dde8f63e154c"
-PositiveFactorizations = "85a6dd25-e78a-55b7-8502-1745935b8125"
 Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
 RecursiveArrayTools = "731186ca-8d62-57ce-b412-fbd966d074cd"
 ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
@@ -68,4 +64,4 @@ StableRNGs = "860ef19b-820b-49d6-a774-d7a799459cd3"
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
 
 [targets]
-test = ["Test", "Aqua", "Distributions", "ExplicitImports", "JET", "MathOptInterface", "Measurements", "OptimTestProblems", "Random", "RecursiveArrayTools", "StableRNGs", "LineSearches", "NLSolversBase", "PositiveFactorizations", "ReverseDiff", "ADTypes"]
+test = ["Test", "Aqua", "Distributions", "ExplicitImports", "ForwardDiff", "JET", "MathOptInterface", "Measurements", "OptimTestProblems", "Random", "RecursiveArrayTools", "StableRNGs", "ReverseDiff"]
diff --git a/docs/Project.toml b/docs/Project.toml
index e112c9774..c8ea546db 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -1,4 +1,5 @@
 [deps]
+ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
 DocumenterCitations = "daee34ce-89f3-4625-b898-19384cb65244"
 ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
diff --git a/docs/src/examples/ipnewton_basics.jl b/docs/src/examples/ipnewton_basics.jl
index 12d37b99e..a8564957b 100644
--- a/docs/src/examples/ipnewton_basics.jl
+++ b/docs/src/examples/ipnewton_basics.jl
@@ -78,7 +78,7 @@ using Test #src
 @test Optim.converged(res) #src
 @test Optim.minimum(res) ≈ 0.25 #src
 
-# Like the rest of Optim, you can also use `autodiff=:forward` and just pass in
+# Like the rest of Optim, you can also use `autodiff=ADTypes.AutoForwardDiff()` and just pass in
 # `fun`.
 
 # If we only want to set lower bounds, use `ux = fill(Inf, 2)`
diff --git a/docs/src/examples/maxlikenlm.jl b/docs/src/examples/maxlikenlm.jl
index ad07c4382..d8da0aafa 100644
--- a/docs/src/examples/maxlikenlm.jl
+++ b/docs/src/examples/maxlikenlm.jl
@@ -22,6 +22,7 @@
 using Optim, NLSolversBase
 using LinearAlgebra: diag
 using ForwardDiff
+using ADTypes: AutoForwardDiff
 
 #md # !!! tip
 #md #     Add Optim with the following command at the Julia command prompt:
@@ -152,7 +153,7 @@ end
 func = TwiceDifferentiable(
     vars -> Log_Likelihood(x, y, vars[1:nvar], vars[nvar+1]),
     ones(nvar + 1);
-    autodiff = :forward,
+    autodiff = AutoForwardDiff(),
 );
 
 # The above statment accepts 4 inputs: the x matrix, the dependent
@@ -163,7 +164,7 @@ func = TwiceDifferentiable(
 # the error variance.
 #
 # The `ones(nvar+1)` are the starting values for the parameters and
-# the `autodiff=:forward` command performs forward mode automatic
+# the `autodiff=ADTypes.AutoForwardDiff()` command performs forward mode automatic
 # differentiation.
 #
 # The actual optimization of the likelihood function is accomplished
diff --git a/docs/src/user/gradientsandhessians.md b/docs/src/user/gradientsandhessians.md
index 168a9d386..dc1af2352 100644
--- a/docs/src/user/gradientsandhessians.md
+++ b/docs/src/user/gradientsandhessians.md
@@ -16,10 +16,10 @@ Automatic differentiation techniques are a middle ground between finite differen
 Reverse-mode automatic differentiation can be seen as an automatic implementation of the adjoint method mentioned above, and requires a runtime comparable to only one evaluation of ``f``.
 It is however considerably more complex to implement, requiring to record the execution of the program to then run it backwards, and incurs a larger overhead.
 
-Forward-mode automatic differentiation is supported through the [ForwardDiff.jl](https://github.com/JuliaDiff/ForwardDiff.jl) package by providing the `autodiff=:forward` keyword to `optimize`.
-More generic automatic differentiation is supported thanks to [DifferentiationInterface.jl](https://github.com/JuliaDiff/DifferentiationInterface.jl), by setting `autodiff` to any compatible backend object from [ADTypes.jl](https://github.com/SciML/ADTypes.jl).
-For instance, the user can choose `autodiff=AutoReverseDiff()`, `autodiff=AutoEnzyme()`, `autodiff=AutoMooncake()` or `autodiff=AutoZygote()` for a reverse-mode gradient computation, which is generally faster than forward mode on large inputs.
-Each of these choices requires loading the corresponding package beforehand.
+Generic automatic differentiation is supported thanks to [DifferentiationInterface.jl](https://github.com/JuliaDiff/DifferentiationInterface.jl), by setting `autodiff` to any compatible backend object from [ADTypes.jl](https://github.com/SciML/ADTypes.jl).
+For instance, forward-mode automatic differentiation is supported through the [ForwardDiff.jl](https://github.com/JuliaDiff/ForwardDiff.jl) package by providing the `autodiff=ADTypes.AutoForwardDiff()` keyword to `optimize`.
+Additionally, the user can choose `autodiff=AutoReverseDiff()`, `autodiff=AutoEnzyme()`, `autodiff=AutoMooncake()` or `autodiff=AutoZygote()` for a reverse-mode gradient computation, which is generally faster than forward mode on large inputs.
+Each of these choices requires loading the `ADTypes` package and the corresponding automatic differentiation package (e.g., `ForwardDiff` or `ReverseDiff`) beforehand.
 
 ## Example
 
@@ -66,14 +66,16 @@ julia> Optim.minimizer(optimize(f, initial_x, BFGS()))
 ```
 
 Still looks good. Returning to automatic differentiation, let us try both solvers using this method. We enable [forward mode](https://github.com/JuliaDiff/ForwardDiff.jl) automatic
-differentiation by using the `autodiff = :forward` keyword.
+differentiation by using the `autodiff = AutoForwardDiff()` keyword.
 ```jlcon
-julia> Optim.minimizer(optimize(f, initial_x, BFGS(); autodiff = :forward))
+julia> using ADTypes: AutoForwardDiff
+
+julia> Optim.minimizer(optimize(f, initial_x, BFGS(); autodiff = AutoForwardDiff()))
 2-element Array{Float64,1}:
  1.0
  1.0
 
-julia> Optim.minimizer(optimize(f, initial_x, Newton(); autodiff = :forward))
+julia> Optim.minimizer(optimize(f, initial_x, Newton(); autodiff = AutoForwardDiff()))
 2-element Array{Float64,1}:
  1.0
  1.0
diff --git a/docs/src/user/minimization.md b/docs/src/user/minimization.md
index 238ad591e..1f81d577b 100644
--- a/docs/src/user/minimization.md
+++ b/docs/src/user/minimization.md
@@ -26,9 +26,10 @@ If we pass `f` alone, Optim will construct an approximate gradient for us using
 ```jl
 optimize(f, x0, LBFGS())
 ```
-For better performance and greater precision, you can pass your own gradient function. If your objective is written in all Julia code with no special calls to external (that is non-Julia) libraries, you can also use automatic differentiation, by using the `autodiff` keyword and setting it to `:forward`:
+For better performance and greater precision, you can pass your own gradient function. If your objective is written in all Julia code with no special calls to external (that is non-Julia) libraries, you can also use automatic differentiation, by using the `autodiff` keyword and setting it to `AutoForwardDiff()`:
 ```julia
-optimize(f, x0, LBFGS(); autodiff = :forward)
+using ADTypes: AutoForwardDiff
+optimize(f, x0, LBFGS(); autodiff = AutoForwardDiff())
 ```
 
 For the Rosenbrock example, the analytical gradient can be shown to be:
diff --git a/ext/OptimMOIExt.jl b/ext/OptimMOIExt.jl
index 5d0efa1d8..8f9147ef8 100644
--- a/ext/OptimMOIExt.jl
+++ b/ext/OptimMOIExt.jl
@@ -333,7 +333,7 @@ function MOI.optimize!(model::Optimizer{T}) where {T}
             inplace = true,
         )
     else
-        d = Optim.promote_objtype(method, initial_x, :finite, true, f, g!, h!)
+        d = Optim.promote_objtype(method, initial_x, Optim.DEFAULT_AD_TYPE, true, f, g!, h!)
         options = Optim.Options(; Optim.default_options(method)..., options...)
         if nl_constrained || has_bounds
             if nl_constrained
diff --git a/src/Optim.jl b/src/Optim.jl
index f97375a6e..a311e1af9 100644
--- a/src/Optim.jl
+++ b/src/Optim.jl
@@ -50,6 +50,8 @@ using NLSolversBase:
 # var for NelderMead
 import StatsBase: var
 
+import ADTypes
+
 using LinearAlgebra:
     LinearAlgebra,
     Diagonal,
diff --git a/src/multivariate/optimize/interface.jl b/src/multivariate/optimize/interface.jl
index 49603f655..4642dd0e0 100644
--- a/src/multivariate/optimize/interface.jl
+++ b/src/multivariate/optimize/interface.jl
@@ -4,6 +4,9 @@ fallback_method(f) = NelderMead()
 fallback_method(f, g!) = LBFGS()
 fallback_method(f, g!, h!) = Newton()
 
+# By default, use central finite difference method
+const DEFAULT_AD_TYPE = ADTypes.AutoFiniteDiff(; fdtype = Val(:central))
+
 function fallback_method(f::InplaceObjective)
     if !(f.fdf isa Nothing)
         if !(f.hv isa Nothing)
@@ -36,48 +39,48 @@ fallback_method(d::OnceDifferentiable) = LBFGS()
 fallback_method(d::TwiceDifferentiable) = Newton()
 
 # promote the objective (tuple of callables or an AbstractObjective) according to method requirement
-promote_objtype(method, initial_x, autodiff, inplace::Bool, args...) =
+promote_objtype(method, initial_x, autodiff::ADTypes.AbstractADType, inplace::Bool, args...) =
     error("No default objective type for $method and $args.")
 # actual promotions, notice that (args...) captures FirstOrderOptimizer and NonDifferentiable, etc
-promote_objtype(method::ZerothOrderOptimizer, x, autodiff, inplace::Bool, args...) =
+promote_objtype(method::ZerothOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, args...) =
     NonDifferentiable(args..., x, real(zero(eltype(x))))
-promote_objtype(method::FirstOrderOptimizer, x, autodiff, inplace::Bool, f) =
+promote_objtype(method::FirstOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f) =
     OnceDifferentiable(f, x, real(zero(eltype(x))); autodiff = autodiff)
-promote_objtype(method::FirstOrderOptimizer, x, autodiff, inplace::Bool, args...) =
+promote_objtype(method::FirstOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, args...) =
     OnceDifferentiable(args..., x, real(zero(eltype(x))); inplace = inplace)
-promote_objtype(method::FirstOrderOptimizer, x, autodiff, inplace::Bool, f, g, h) =
+promote_objtype(method::FirstOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f, g, h) =
     OnceDifferentiable(f, g, x, real(zero(eltype(x))); inplace = inplace)
-promote_objtype(method::SecondOrderOptimizer, x, autodiff, inplace::Bool, f) =
+promote_objtype(method::SecondOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f) =
     TwiceDifferentiable(f, x, real(zero(eltype(x))); autodiff = autodiff)
 promote_objtype(
     method::SecondOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     f::NotInplaceObjective,
 ) = TwiceDifferentiable(f, x, real(zero(eltype(x))))
 promote_objtype(
     method::SecondOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     f::InplaceObjective,
 ) = TwiceDifferentiable(f, x, real(zero(eltype(x))))
 promote_objtype(
     method::SecondOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     f::NLSolversBase.InPlaceObjectiveFGHv,
 ) = TwiceDifferentiableHV(f, x)
 promote_objtype(
     method::SecondOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     f::NLSolversBase.InPlaceObjectiveFG_Hv,
 ) = TwiceDifferentiableHV(f, x)
-promote_objtype(method::SecondOrderOptimizer, x, autodiff, inplace::Bool, f, g) =
+promote_objtype(method::SecondOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f, g) =
     TwiceDifferentiable(
         f,
         g,
@@ -86,48 +89,48 @@ promote_objtype(method::SecondOrderOptimizer, x, autodiff, inplace::Bool, f, g)
         inplace = inplace,
         autodiff = autodiff,
     )
-promote_objtype(method::SecondOrderOptimizer, x, autodiff, inplace::Bool, f, g, h) =
+promote_objtype(method::SecondOrderOptimizer, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f, g, h) =
     TwiceDifferentiable(f, g, h, x, real(zero(eltype(x))); inplace = inplace)
 # no-op
 promote_objtype(
     method::ZerothOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     nd::NonDifferentiable,
 ) = nd
 promote_objtype(
     method::ZerothOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     od::OnceDifferentiable,
 ) = od
 promote_objtype(
     method::FirstOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     od::OnceDifferentiable,
 ) = od
 promote_objtype(
     method::ZerothOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     td::TwiceDifferentiable,
 ) = td
 promote_objtype(
     method::FirstOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     td::TwiceDifferentiable,
 ) = td
 promote_objtype(
     method::SecondOrderOptimizer,
     x,
-    autodiff,
+    autodiff::ADTypes.AbstractADType,
     inplace::Bool,
     td::TwiceDifferentiable,
 ) = td
@@ -136,8 +139,8 @@ promote_objtype(
 function optimize(
     f,
     initial_x::AbstractArray;
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     method = fallback_method(f)
     d = promote_objtype(method, initial_x, autodiff, inplace, f)
@@ -149,8 +152,8 @@ function optimize(
     f,
     g,
     initial_x::AbstractArray;
-    autodiff = :finite,
-    inplace = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
+    inplace::Bool = true,
 )
 
     method = fallback_method(f, g)
@@ -165,8 +168,8 @@ function optimize(
     g,
     h,
     initial_x::AbstractArray;
-    inplace = true,
-    autodiff = :finite
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     method = fallback_method(f, g, h)
     d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)
@@ -188,8 +191,8 @@ function optimize(
     f,
     initial_x::AbstractArray,
     options::Options;
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     method = fallback_method(f)
     d = promote_objtype(method, initial_x, autodiff, inplace, f)
@@ -200,8 +203,8 @@ function optimize(
     g,
     initial_x::AbstractArray,
     options::Options;
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
 
     method = fallback_method(f, g)
@@ -214,8 +217,8 @@ function optimize(
     h,
     initial_x::AbstractArray{T},
     options::Options;
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 ) where {T}
     method = fallback_method(f, g, h)
     d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)
@@ -229,8 +232,8 @@ function optimize(
     initial_x::AbstractArray,
     method::AbstractOptimizer,
     options::Options = Options(; default_options(method)...);
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     d = promote_objtype(method, initial_x, autodiff, inplace, f)
     optimize(d, initial_x, method, options)
@@ -241,8 +244,8 @@ function optimize(
     initial_x::AbstractArray,
     method::AbstractOptimizer,
     options::Options = Options(; default_options(method)...);
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
 
     d = promote_objtype(method, initial_x, autodiff, inplace, f)
@@ -254,8 +257,8 @@ function optimize(
     initial_x::AbstractArray,
     method::AbstractOptimizer,
     options::Options = Options(; default_options(method)...);
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
 
     d = promote_objtype(method, initial_x, autodiff, inplace, f, g)
@@ -268,8 +271,8 @@ function optimize(
     initial_x::AbstractArray,
     method::AbstractOptimizer,
     options::Options = Options(; default_options(method)...);
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
 
     d = promote_objtype(method, initial_x, autodiff, inplace, f, g, h)
@@ -282,8 +285,8 @@ function optimize(
     initial_x::AbstractArray,
     method::SecondOrderOptimizer,
     options::Options = Options(; default_options(method)...);
-    inplace = true,
-    autodiff = :finite,
+    inplace::Bool = true,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 ) where {D<:Union{NonDifferentiable,OnceDifferentiable}}
     d = promote_objtype(method, initial_x, autodiff, inplace, d)
     optimize(d, initial_x, method, options)
diff --git a/src/multivariate/solvers/constrained/fminbox.jl b/src/multivariate/solvers/constrained/fminbox.jl
index 4f12ab0e5..664b74598 100644
--- a/src/multivariate/solvers/constrained/fminbox.jl
+++ b/src/multivariate/solvers/constrained/fminbox.jl
@@ -282,7 +282,7 @@ function optimize(
     F::Fminbox = Fminbox(),
     options::Options = Options();
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     if f isa NonDifferentiable
         f = f.f
@@ -308,7 +308,7 @@ function optimize(
     optimize(od, l, u, initial_x, F, options)
 end
 
-function optimize(f, l::Number, u::Number, initial_x::AbstractArray; autodiff = :finite)
+function optimize(f, l::Number, u::Number, initial_x::AbstractArray; autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE)
     T = eltype(initial_x)
     optimize(
         OnceDifferentiable(f, initial_x, zero(T); autodiff),
@@ -328,7 +328,7 @@ function optimize(
     mo::AbstractConstrainedOptimizer,
     opt::Options = Options();
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T = eltype(initial_x)
     optimize(
@@ -350,7 +350,7 @@ function optimize(
     mo::AbstractConstrainedOptimizer = Fminbox(),
     opt::Options = Options();
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T = eltype(initial_x)
     optimize(f, T.(l), Fill(T(u), size(initial_x)...), initial_x, mo, opt; inplace, autodiff)
@@ -363,7 +363,7 @@ function optimize(
     mo::AbstractConstrainedOptimizer=Fminbox(),
     opt::Options = Options();
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T = eltype(initial_x)
     optimize(f, Fill(T(l), size(initial_x)...), T.(u), initial_x, mo, opt; inplace, autodiff)
@@ -376,7 +376,7 @@ function optimize(
     initial_x::AbstractArray,
     opt::Options;
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T = eltype(initial_x)
 
@@ -400,7 +400,7 @@ function optimize(
     initial_x::AbstractArray,
     opt::Options;
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T = eltype(initial_x)
     optimize(f, g, T.(l), Fill(T(u), size(initial_x)...), initial_x, opt; inplace, autodiff)
@@ -414,7 +414,7 @@ function optimize(
     initial_x::AbstractArray,
     opt::Options;
     inplace::Bool=true,
-    autodiff = :finite,
+    autodiff::ADTypes.AbstractADType = DEFAULT_AD_TYPE,
 )
     T= eltype(initial_x)
     optimize(f, g, Fill(T(l), size(initial_x)...), T.(u), initial_x, opt; inplace, autodiff)
diff --git a/src/multivariate/solvers/constrained/ipnewton/ipnewton.jl b/src/multivariate/solvers/constrained/ipnewton/ipnewton.jl
index 5a5c26e00..cc8a20914 100644
--- a/src/multivariate/solvers/constrained/ipnewton/ipnewton.jl
+++ b/src/multivariate/solvers/constrained/ipnewton/ipnewton.jl
@@ -7,10 +7,10 @@ end
 
 Base.summary(io::IO, ::IPNewton) = print(io, "Interior Point Newton")
 
-promote_objtype(method::IPNewton, x, autodiff, inplace::Bool, f::TwiceDifferentiable) = f
-promote_objtype(method::IPNewton, x, autodiff, inplace::Bool, f) =
+promote_objtype(method::IPNewton, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f::TwiceDifferentiable) = f
+promote_objtype(method::IPNewton, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f) =
     TwiceDifferentiable(f, x, real(zero(eltype(x))); autodiff = autodiff)
-promote_objtype(method::IPNewton, x, autodiff, inplace::Bool, f, g) = TwiceDifferentiable(
+promote_objtype(method::IPNewton, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f, g) = TwiceDifferentiable(
     f,
     g,
     x,
@@ -18,7 +18,7 @@ promote_objtype(method::IPNewton, x, autodiff, inplace::Bool, f, g) = TwiceDiffe
     inplace = inplace,
     autodiff = autodiff,
 )
-promote_objtype(method::IPNewton, x, autodiff, inplace::Bool, f, g, h) =
+promote_objtype(method::IPNewton, x, autodiff::ADTypes.AbstractADType, inplace::Bool, f, g, h) =
     TwiceDifferentiable(f, g, h, x, real(zero(eltype(x))); inplace = inplace)
 
 # TODO: Add support for InitialGuess from LineSearches
diff --git a/test/general/objective_types.jl b/test/general/objective_types.jl
index cbc5b078e..f197c515b 100644
--- a/test/general/objective_types.jl
+++ b/test/general/objective_types.jl
@@ -4,8 +4,8 @@
     @test_throws ErrorException OnceDifferentiable(x -> x, rand(10); autodiff = :wah)
 
     for T in (OnceDifferentiable, TwiceDifferentiable)
-        odad1 = T(x -> 5.0, rand(1); autodiff = :finite)
-        odad2 = T(x -> 5.0, rand(1); autodiff = :forward)
+        odad1 = T(x -> 5.0, rand(1); autodiff = AutoFiniteDiff(; fdtype = Val(:central)))
+        odad2 = T(x -> 5.0, rand(1); autodiff = AutoForwardDiff())
         odad3 = T(x -> 5.0, rand(1); autodiff = AutoReverseDiff())
         Optim.gradient!(odad1, rand(1))
         Optim.gradient!(odad2, rand(1))
@@ -17,8 +17,8 @@
 
     for a in (1.0, 5.0)
         xa = rand(1)
-        odad1 = OnceDifferentiable(x -> a * x[1], xa; autodiff = :finite)
-        odad2 = OnceDifferentiable(x -> a * x[1], xa; autodiff = :forward)
+        odad1 = OnceDifferentiable(x -> a * x[1], xa; autodiff = AutoFiniteDiff(; fdtype = Val(:central)))
+        odad2 = OnceDifferentiable(x -> a * x[1], xa; autodiff = AutoForwardDiff())
         odad3 = OnceDifferentiable(x -> a * x[1], xa; autodiff = AutoReverseDiff())
         Optim.gradient!(odad1, xa)
         Optim.gradient!(odad2, xa)
@@ -29,8 +29,8 @@ end
 
     for a in (1.0, 5.0)
         xa = rand(1)
-        odad1 = OnceDifferentiable(x -> a * x[1]^2, xa; autodiff = :finite)
-        odad2 = OnceDifferentiable(x -> a * x[1]^2, xa; autodiff = :forward)
+        odad1 = OnceDifferentiable(x -> a * x[1]^2, xa; autodiff = AutoFiniteDiff(; fdtype = Val(:central)))
+        odad2 = OnceDifferentiable(x -> a * x[1]^2, xa; autodiff = AutoForwardDiff())
         odad3 = OnceDifferentiable(x -> a * x[1]^2, xa; autodiff = AutoReverseDiff())
         Optim.gradient!(odad1, xa)
         Optim.gradient!(odad2, xa)
@@ -40,7 +40,7 @@
         @test Optim.gradient(odad3) == 2.0 * a * xa
     end
     for dtype in (OnceDifferentiable, TwiceDifferentiable)
-        for autodiff in (:finite, :forward, AutoReverseDiff())
+        for autodiff in (AutoFiniteDiff(; fdtype = Val(:central)), AutoForwardDiff(), AutoReverseDiff())
            differentiable = dtype(x -> sum(x), rand(2); autodiff = autodiff)
             Optim.value(differentiable)
             Optim.value!(differentiable, rand(2))
diff --git a/test/multivariate/solvers/constrained/fminbox.jl b/test/multivariate/solvers/constrained/fminbox.jl
index 344ab93a8..1d9175ba6 100644
--- a/test/multivariate/solvers/constrained/fminbox.jl
+++ b/test/multivariate/solvers/constrained/fminbox.jl
@@ -121,7 +121,7 @@
     optimize(od, lb, ub, initial_x, Fminbox())
     nd = NonDifferentiable(exponential, initial_x)
     optimize(nd, lb, ub, initial_x, Fminbox(NelderMead()))
-    od_forward = OnceDifferentiable(exponential, initial_x; autodiff = :forward)
+    od_forward = OnceDifferentiable(exponential, initial_x; autodiff = AutoForwardDiff())
     optimize(od_forward, lb, ub, initial_x, Fminbox())
     optimize(exponential, lb, ub, initial_x, Fminbox())
     optimize(exponential, exponential_gradient!, lb, ub, initial_x, Fminbox())
@@ -131,8 +131,8 @@
     optimize(exponential, exponential_gradient!, lb, ub, initial_x)
     @testset "inplace and autodiff keywords #616" begin
         optimize(exponential, lb, ub, initial_x, Fminbox())
-        optimize(exponential, lb, ub, initial_x, Fminbox(); autodiff = :finite)
-        optimize(exponential, lb, ub, initial_x, Fminbox(); autodiff = :forward)
+        optimize(exponential, lb, ub, initial_x, Fminbox(); autodiff = AutoFiniteDiff(; fdtype = Val(:central)))
+        optimize(exponential, lb, ub, initial_x, Fminbox(); autodiff = AutoForwardDiff())
         optimize(
             exponential,
             exponential_gradient,
@@ -196,7 +196,7 @@ end
             [1.0],
             Fminbox(m),
         )
-        optimize(x -> sqrt(x[1]), [0.0], [10.0], [1.0], Fminbox(m); autodiff = :forwarddiff)
+        optimize(x -> sqrt(x[1]), [0.0], [10.0], [1.0], Fminbox(m); autodiff = AutoForwardDiff())
     end
 end
diff --git a/test/multivariate/solvers/constrained/ipnewton/interface.jl b/test/multivariate/solvers/constrained/ipnewton/interface.jl
index 57ca31c92..476d183a1 100644
--- a/test/multivariate/solvers/constrained/ipnewton/interface.jl
+++ b/test/multivariate/solvers/constrained/ipnewton/interface.jl
@@ -11,7 +11,7 @@ using Optim, Test
     ux = fill(+1.2, dof)
     dfc = TwiceDifferentiableConstraints(lx, ux)
 
-    res = optimize(df, dfc, x0, IPNewton(); autodiff = :forward)
+    res = optimize(df, dfc, x0, IPNewton(); autodiff = AutoForwardDiff())
     res = optimize(df, dfc, x0, IPNewton())
 end
diff --git a/test/runtests.jl b/test/runtests.jl
index ac6ac24f9..c55f8bfae 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -15,7 +15,7 @@
 import SparseArrays: normalize!, spdiagm
 import ForwardDiff
 import ReverseDiff
-using ADTypes: AutoReverseDiff
+using ADTypes: AutoFiniteDiff, AutoForwardDiff, AutoReverseDiff
 
 debug_printing = false
 test_broken = false
diff --git a/test/special/bigfloat/initial_convergence.jl b/test/special/bigfloat/initial_convergence.jl
index 593f27b5f..ed43f938a 100644
--- a/test/special/bigfloat/initial_convergence.jl
+++ b/test/special/bigfloat/initial_convergence.jl
@@ -1,7 +1,7 @@
 @testset "bigfloat initial convergence #720" begin
     f(x) = x[1]^2
     x0 = BigFloat[0]
-    obj = OnceDifferentiable(f, x0; autodiff = :forward)
+    obj = OnceDifferentiable(f, x0; autodiff = AutoForwardDiff())
     for method in (
         GradientDescent,
         BFGS,