Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
158 changes: 158 additions & 0 deletions src/Test/test_nonlinear.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2382,3 +2382,161 @@ function setup_test(
end

# First released in MathOptInterface v1.46.0.
function version_added(::typeof(test_vector_nonlinear_oracle_no_hessian))
    return v"1.46.0"
end

"""
    test_VectorNonlinearOracle_LagrangeMultipliers_MAX_SENSE(
        model::MOI.ModelLike,
        config::MOI.Test.Config{T},
    ) where {T}

Test getting `MOI.LagrangeMultiplier` and setting `MOI.LagrangeMultiplierStart`
for a `MOI.VectorNonlinearOracle` constraint with a maximization objective.

The problem is: max x₁ + x₂ subject to x₁² + x₂² ≤ 1. The optimal solution is
x₁ = x₂ = 1/√2 with Lagrange multiplier -1/√2, so that the `ConstraintDual`,
which is μ'∇f(x), is the vector (-1, -1).
"""
function test_VectorNonlinearOracle_LagrangeMultipliers_MAX_SENSE(
    model::MOI.ModelLike,
    config::MOI.Test.Config{T},
) where {T}
    @requires _supports(config, MOI.optimize!)
    @requires _supports(config, MOI.ConstraintDual)
    @requires _supports(config, MOI.LagrangeMultiplier)
    @requires MOI.supports_constraint(
        model,
        MOI.VectorOfVariables,
        MOI.VectorNonlinearOracle{T},
    )
    set = MOI.VectorNonlinearOracle(;
        dimension = 2,
        l = T[typemin(T)],
        u = T[1],
        eval_f = (ret, x) -> (ret[1] = x[1]^2 + x[2]^2),
        jacobian_structure = [(1, 1), (1, 2)],
        eval_jacobian = (ret, x) -> ret .= T(2) .* x,
        hessian_lagrangian_structure = [(1, 1), (2, 2)],
        eval_hessian_lagrangian = (ret, x, u) -> ret .= T(2) .* u[1],
    )
    x = MOI.add_variables(model, 2)
    MOI.set(model, MOI.ObjectiveSense(), MOI.MAX_SENSE)
    f = one(T) * x[1] + one(T) * x[2]
    MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f)
    c = MOI.add_constraint(model, MOI.VectorOfVariables(x), set)
    y = T(1) / sqrt(T(2))
    CI = MOI.ConstraintIndex{MOI.VectorOfVariables,MOI.VectorNonlinearOracle{T}}
    if MOI.supports(model, MOI.LagrangeMultiplierStart(), CI)
        MOI.set(model, MOI.LagrangeMultiplierStart(), c, T[-y])
    end
    MOI.optimize!(model)
    @test isapprox(MOI.get(model, MOI.VariablePrimal(), x), [y, y], config)
    @test isapprox(MOI.get(model, MOI.ConstraintDual(), c), T[-1, -1], config)
    # Pass `config` so the comparison respects the configured tolerances,
    # consistent with the two assertions above.
    @test isapprox(MOI.get(model, MOI.LagrangeMultiplier(), c), T[-y], config)
    # Evaluate the oracle callbacks directly, just for code coverage.
    x = T[1, 2]
    ret = T[0]
    set.eval_f(ret, x)
    @test ret == T[5]
    ret = T[0, 0]
    set.eval_jacobian(ret, x)
    @test ret == T[2, 4]
    set.eval_hessian_lagrangian(ret, x, T[-1])
    # Use `T[...]` for consistency with the other callback checks above.
    @test ret == T[-2, -2]
    return
end

# Configure the mock optimizer to return the known optimal solution of the
# MAX_SENSE Lagrange multiplier test. The `LagrangeMultiplier` attribute is
# set separately because it is not an argument of `mock_optimize!`.
function setup_test(
    ::typeof(test_VectorNonlinearOracle_LagrangeMultipliers_MAX_SENSE),
    model::MOIU.MockOptimizer,
    config::Config{T},
) where {T}
    F = MOI.VectorOfVariables
    S = MOI.VectorNonlinearOracle{T}
    invsqrt2 = inv(sqrt(T(2)))
    function _mock_solve(mock)
        MOI.Utilities.mock_optimize!(
            mock,
            config.optimal_status,
            T[invsqrt2, invsqrt2],
            (F, S) => [T[-1, -1]],
        )
        indices = MOI.get(mock, MOI.ListOfConstraintIndices{F,S}())
        MOI.set(mock, MOI.LagrangeMultiplier(), only(indices), T[-invsqrt2])
        return
    end
    MOI.Utilities.set_mock_optimize!(model, _mock_solve)
    model.eval_variable_constraint_dual = false
    return () -> (model.eval_variable_constraint_dual = true)
end

# First released in MathOptInterface v1.48.0.
function version_added(
    ::typeof(test_VectorNonlinearOracle_LagrangeMultipliers_MAX_SENSE),
)
    return v"1.48.0"
end

"""
    test_VectorNonlinearOracle_LagrangeMultipliers_MIN_SENSE(
        model::MOI.ModelLike,
        config::MOI.Test.Config{T},
    ) where {T}

Test getting `MOI.LagrangeMultiplier` and setting `MOI.LagrangeMultiplierStart`
for a `MOI.VectorNonlinearOracle` constraint with a minimization objective.

The problem is: min x₁ + x₂ subject to -x₁² - x₂² ≥ -1. The optimal solution
is x₁ = x₂ = -1/√2 with Lagrange multiplier 1/√2, so that the
`ConstraintDual`, which is μ'∇f(x), is the vector (1, 1).
"""
function test_VectorNonlinearOracle_LagrangeMultipliers_MIN_SENSE(
    model::MOI.ModelLike,
    config::MOI.Test.Config{T},
) where {T}
    @requires _supports(config, MOI.optimize!)
    @requires _supports(config, MOI.ConstraintDual)
    @requires _supports(config, MOI.LagrangeMultiplier)
    @requires MOI.supports_constraint(
        model,
        MOI.VectorOfVariables,
        MOI.VectorNonlinearOracle{T},
    )
    set = MOI.VectorNonlinearOracle(;
        dimension = 2,
        l = T[-1],
        u = T[typemax(T)],
        eval_f = (ret, x) -> (ret[1] = -x[1]^2 - x[2]^2),
        jacobian_structure = [(1, 1), (1, 2)],
        eval_jacobian = (ret, x) -> ret .= -T(2) .* x,
        hessian_lagrangian_structure = [(1, 1), (2, 2)],
        eval_hessian_lagrangian = (ret, x, u) -> ret .= -T(2) .* u[1],
    )
    x = MOI.add_variables(model, 2)
    MOI.set(model, MOI.ObjectiveSense(), MOI.MIN_SENSE)
    f = one(T) * x[1] + one(T) * x[2]
    MOI.set(model, MOI.ObjectiveFunction{typeof(f)}(), f)
    c = MOI.add_constraint(model, MOI.VectorOfVariables(x), set)
    y = T(1) / sqrt(T(2))
    CI = MOI.ConstraintIndex{MOI.VectorOfVariables,MOI.VectorNonlinearOracle{T}}
    if MOI.supports(model, MOI.LagrangeMultiplierStart(), CI)
        MOI.set(model, MOI.LagrangeMultiplierStart(), c, T[y])
    end
    MOI.optimize!(model)
    @test isapprox(MOI.get(model, MOI.VariablePrimal(), x), [-y, -y], config)
    @test isapprox(MOI.get(model, MOI.ConstraintDual(), c), T[1, 1], config)
    # Pass `config` so the comparison respects the configured tolerances,
    # consistent with the two assertions above.
    @test isapprox(MOI.get(model, MOI.LagrangeMultiplier(), c), T[y], config)
    # Evaluate the oracle callbacks directly, just for code coverage.
    x = T[1, 2]
    ret = T[0]
    set.eval_f(ret, x)
    @test ret == T[-5]
    ret = T[0, 0]
    set.eval_jacobian(ret, x)
    @test ret == T[-2, -4]
    set.eval_hessian_lagrangian(ret, x, T[-1])
    # Use `T[...]` for consistency with the other callback checks above.
    @test ret == T[2, 2]
    return
end

# Configure the mock optimizer to return the known optimal solution of the
# MIN_SENSE Lagrange multiplier test. The `LagrangeMultiplier` attribute is
# set separately because it is not an argument of `mock_optimize!`.
function setup_test(
    ::typeof(test_VectorNonlinearOracle_LagrangeMultipliers_MIN_SENSE),
    model::MOIU.MockOptimizer,
    config::Config{T},
) where {T}
    F = MOI.VectorOfVariables
    S = MOI.VectorNonlinearOracle{T}
    invsqrt2 = inv(sqrt(T(2)))
    function _mock_solve(mock)
        MOI.Utilities.mock_optimize!(
            mock,
            config.optimal_status,
            T[-invsqrt2, -invsqrt2],
            (F, S) => [T[1, 1]],
        )
        indices = MOI.get(mock, MOI.ListOfConstraintIndices{F,S}())
        MOI.set(mock, MOI.LagrangeMultiplier(), only(indices), T[invsqrt2])
        return
    end
    MOI.Utilities.set_mock_optimize!(model, _mock_solve)
    model.eval_variable_constraint_dual = false
    return () -> (model.eval_variable_constraint_dual = true)
end

# First released in MathOptInterface v1.48.0.
function version_added(
    ::typeof(test_VectorNonlinearOracle_LagrangeMultipliers_MIN_SENSE),
)
    return v"1.48.0"
end
17 changes: 16 additions & 1 deletion src/Utilities/mockoptimizer.jl
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,10 @@ mutable struct MockOptimizer{MT<:MOI.ModelLike,T} <: MOI.AbstractOptimizer
Dict{Int,MOI.BasisStatusCode},
}
variable_basis_status::Dict{MOI.VariableIndex,Dict{Int,MOI.BasisStatusCode}}
constraint_attributes::Dict{
MOI.AbstractConstraintAttribute,
Dict{MOI.ConstraintIndex,Any},
}
end

function MockOptimizer(
Expand Down Expand Up @@ -133,6 +137,7 @@ function MockOptimizer(
# Basis status
Dict{MOI.ConstraintIndex,Dict{Int,MOI.BasisStatusCode}}(),
Dict{MOI.VariableIndex,Dict{Int,MOI.BasisStatusCode}}(),
Dict{MOI.AbstractConstraintAttribute,Dict{MOI.ConstraintIndex,Any}}(),
)
end

Expand Down Expand Up @@ -421,7 +426,14 @@ function MOI.set(
idx::MOI.ConstraintIndex,
value,
)
MOI.set(mock.inner_model, attr, xor_index(idx), value)
if MOI.is_set_by_optimize(attr)
ret = get!(mock.constraint_attributes, attr) do
return Dict{MOI.ConstraintIndex,Any}()
end
ret[idx] = value
else
MOI.set(mock.inner_model, attr, xor_index(idx), value)
end
return
end

Expand Down Expand Up @@ -660,6 +672,9 @@ function MOI.get(
)
# If it is thrown by `mock.inner_model`, the index will be xor'ed.
MOI.throw_if_not_valid(mock, idx)
if MOI.is_set_by_optimize(attr)
return mock.constraint_attributes[attr][idx]
end
return MOI.get(mock.inner_model, attr, xor_index(idx))
end

Expand Down
67 changes: 67 additions & 0 deletions src/attributes.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3272,6 +3272,72 @@ function get_fallback(
return supports_constraint(model, F, S) ? 0.0 : Inf
end

"""
    LagrangeMultiplier(result_index::Int = 1)

An [`AbstractConstraintAttribute`](@ref) for the Lagrange multiplier associated
with a constraint.

## Relationship to `ConstraintDual`

This attribute differs from [`ConstraintDual`](@ref) in one important case.
When there is a [`VectorNonlinearOracle`](@ref) constraint of the form:
```math
x \\in VectorNonlinearOracle
```
the associated [`ConstraintDual`](@ref) is ``\\mu^\\top \\nabla f(x)``, and the
value of [`LagrangeMultiplier`](@ref) is the vector ``\\mu`` directly.

Both values are useful in different circumstances.

## DualStatus

Before querying this attribute you should first check [`DualStatus`](@ref) to
confirm that a dual solution is available.

If the [`DualStatus`](@ref) is [`NO_SOLUTION`](@ref) the result of querying
this attribute is undefined.

## `result_index`

The optimizer may return multiple dual solutions. See [`ResultCount`](@ref)
for information on how the results are ordered.

If the solver does not have a dual value for the constraint because the
`result_index` is beyond the available solutions (whose number is indicated by
the [`ResultCount`](@ref) attribute), getting this attribute must throw a
[`ResultIndexBoundsError`](@ref).

## Implementation

Optimizers should implement the following methods:
```
MOI.get(::Optimizer, ::MOI.LagrangeMultiplier, ::MOI.ConstraintIndex)
```
They should not implement [`set`](@ref) or [`supports`](@ref).

Solvers should implement [`LagrangeMultiplier`](@ref) only if they also
implement the [`ConstraintDual`](@ref), and only if the two values are
different.
"""
struct LagrangeMultiplier <: AbstractConstraintAttribute
    result_index::Int

    LagrangeMultiplier(result_index::Int = 1) = new(result_index)
end

"""
    LagrangeMultiplierStart()

An [`AbstractConstraintAttribute`](@ref) for the initial assignment to the
constraint's [`LagrangeMultiplier`](@ref) that the optimizer may use to
warm-start the solve.

May be `nothing` (unset), a number for [`AbstractScalarFunction`](@ref), or a
vector for [`AbstractVectorFunction`](@ref).
"""
struct LagrangeMultiplierStart <: AbstractConstraintAttribute end

"""
is_set_by_optimize(::AnyAttribute)

Expand Down Expand Up @@ -3330,6 +3396,7 @@ function is_set_by_optimize(
ConstraintDual,
ConstraintBasisStatus,
VariableBasisStatus,
LagrangeMultiplier,
},
)
return true
Expand Down
Loading