Skip to content

Commit bc47e64

Browse files
committed
started working on a simple integration of solver with JuMP
1 parent 76515fa commit bc47e64

File tree

9 files changed

+287
-0
lines changed

9 files changed

+287
-0
lines changed

.vscode/settings.json

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,9 +44,12 @@
4444
"lualatex",
4545
"Lyutsarev",
4646
"makedocs",
47+
"mathbb",
48+
"mathbf",
4749
"mathengine",
4850
"Mersenne",
4951
"Mohseni",
52+
"MOIU",
5053
"Mourgias",
5154
"Naeimeh",
5255
"nargs",
@@ -72,13 +75,15 @@
7275
"Sobol",
7376
"sprintf",
7477
"stdlib",
78+
"subseteq",
7579
"testset",
7680
"Tidyverse",
7781
"timedout",
7882
"toplevel",
7983
"triu",
8084
"Vassily",
8185
"versioninfo",
86+
"vmap",
8287
"weakdeps",
8388
"writeheader",
8489
"xchg",

Project.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,12 @@ TOML = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
2727

2828
[weakdeps]
2929
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
30+
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
31+
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
3032

3133
[extensions]
3234
CUDAExt = "CUDA"
35+
JuMPExt = ["JuMP", "MathOptInterface"]
3336

3437
[compat]
3538
Adapt = "4.3"
@@ -46,6 +49,7 @@ Distributions = "0.25"
4649
IntervalSets = "0.7"
4750
JET = "0.9"
4851
JSON = "0.21"
52+
JuMP = "1"
4953
KernelAbstractions = "0.9"
5054
LinearAlgebra = "1.11"
5155
OrderedCollections = "1.8"

ext/JuMPExt/JuMPExt.jl

Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
#=
JuMPExt.jl

Package extension that integrates AOCoptimizer with JuMP through
MathOptInterface (MOI).  It is loaded automatically when both `JuMP`
and `MathOptInterface` are available (see `[extensions]` in Project.toml).
=#

module JuMPExt

# `Diagonal` is used by `_scaling` in variables.jl; without this import
# the extension fails to resolve the name at runtime.
import LinearAlgebra: Diagonal

import MathOptInterface as MOI
import MathOptInterface: is_empty, empty!, optimize!
import AOCoptimizer as AOC

export Optimizer

# Short-hand aliases for the MOI types used throughout the extension.
const MOIU = MOI.Utilities
const VI = MOI.VariableIndex
const CI{S,F} = MOI.ConstraintIndex{S,F}
const EQ{T} = MOI.EqualTo{T}
const LT{T} = MOI.LessThan{T}
const GT{T} = MOI.GreaterThan{T}
const SAT{T} = MOI.ScalarAffineTerm{T}
const SAF{T} = MOI.ScalarAffineFunction{T}
const SQT{T} = MOI.ScalarQuadraticTerm{T}
const SQF{T} = MOI.ScalarQuadraticFunction{T}

include("variables.jl")
include("wrapper.jl")

end # module

ext/JuMPExt/variables.jl

Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,114 @@
#=
variables.jl
=#

"""
    Variable{T}

A struct representing a variable in the optimization problem
as it can be used in the AOCoptimizer. All variables should
have upper and lower bounds. This is trivial for binary variables,
but for continuous, the user should specify the bounds (or,
they should be easy to infer from the constraints---not implemented yet).
"""
struct Variable{T}
    type::Symbol             # :continuous or :binary
    lower::Union{T,Nothing}  # lower bound; always set for :binary
    upper::Union{T,Nothing}  # upper bound; always set for :binary

    function Variable{T}(type::Symbol; lower::Union{T,Nothing} = nothing, upper::Union{T,Nothing} = nothing) where T
        @assert type ∈ (:continuous, :binary)

        if type === :binary
            # Binary variables carry the implicit bounds [0, 1];
            # callers must not pass explicit bounds for them.
            @assert isnothing(lower) && isnothing(upper)

            lower = zero(T)
            upper = one(T)
        else # type === :continuous
            # Bounds are optional for continuous variables, but when both
            # are given they must form a non-empty interval.
            if !isnothing(lower) && !isnothing(upper)
                @assert lower <= upper
            end
        end

        return new{T}(type, lower, upper)
    end
end
# Map from an MOI variable index to the solver-side variable description.
const VariableInfo{T} = Dict{VI,Variable{T}}

"""
    is_bounded(v::Variable)

Return `true` when both the lower and the upper bound of `v` are set.
"""
is_bounded(v::Variable) = !(isnothing(v.lower) || isnothing(v.upper))

#
raw"""
    _scaling(l::V, u::V, L::V, U::V) where {T,V<:AbstractVector{T}}

Compute the affine map that takes a point expressed in the solver's
frame of reference back into the original model's frame of reference.

Let ``\mathbf{y} \in [\mathbf{l}, \mathbf{u}] \subseteq \mathbb{R}^{n}`` be the vector of
variables in the original model and ``\mathbf{Y} \in [\mathbf{L}, \mathbf{U}]
\subseteq \mathbb{R}^{n}`` the corresponding vector in the solver's frame of
reference.  Matching the two intervals endpoint-to-endpoint gives the linear
transformation ``\mathbf{y} = \mathbf{A} \mathbf{Y} + \mathbf{b}`` with

```math
\begin{align*}
\mathbf{A} &= \operatorname{diag}\left(\frac{\mathbf{u} - \mathbf{l}}{\mathbf{U} - \mathbf{L}}\right) \\
\mathbf{b} &= \frac{\mathbf{u} + \mathbf{l}}{2}
    - \frac{\mathbf{u} - \mathbf{l}}{\mathbf{U} - \mathbf{L}} \odot \frac{\mathbf{U} + \mathbf{L}}{2}
\end{align*}
```

so that ``\mathbf{L} \mapsto \mathbf{l}`` and ``\mathbf{U} \mapsto \mathbf{u}``.
Returns the tuple `(A, b)` where `A` is a `Diagonal` matrix.

NOTE(review): an earlier version of this docstring described the inverse
(model → solver) transformation, which is not what the code computes;
the text above matches the implementation.
"""
function _scaling(l::V, u::V, L::V, U::V) where {T,V<:AbstractVector{T}}
    #=
    We only need that `u .!= l`. If `u .== l`, then the best is to simplify the problem,
    but this should not happen here.
    In principle, we could allow `u_i < l_i` or `U_i < L_i`, for some i's,
    but this sounds counter-intuitive.
    =#
    @assert all(u .> l)
    @assert all(U .> L)

    # Per-coordinate slope of the solver-frame -> model-frame map.
    S = (u + -l) ./ (U - L)
    # Offset chosen so that the interval midpoints map onto each other;
    # together with the slope this sends L -> l and U -> u.
    M = (u + l) / T(2) - S .* (U + L) / T(2)
    A = Diagonal(S)

    return (A, M)
end
"""
    _scaling(info::VariableInfo{T}, vmap::Dict{VI,Int}) where {T}

Assemble the bound vectors for all variables (using `vmap` to place each
`VariableIndex` at its column position) and delegate to `_scaling(l, u, L, U)`.

In the solver's frame of reference binary variables live in ``[0, 1]``
and continuous variables in ``[-1, 1]``.  Every variable must be bounded.
"""
function _scaling(info::VariableInfo{T}, vmap::Dict{VI,Int}) where {T}
    n = length(vmap)
    l = zeros(T, n)             # model-frame lower bounds
    u = zeros(T, n)             # model-frame upper bounds
    L = Vector{T}(undef, n)     # solver-frame lower bounds
    U = Vector{T}(undef, n)     # solver-frame upper bounds

    for (vi, i) in vmap
        v = info[vi]

        # Unbounded variables cannot be scaled; bound inference from
        # constraints is not implemented yet (see `Variable` docstring).
        @assert is_bounded(v)

        l[i] = v.lower
        u[i] = v.upper

        if v.type === :binary
            L[i] = zero(T)
            U[i] = one(T)
        else # v.type === :continuous
            L[i] = -one(T)
            U[i] = one(T)
        end
    end

    return _scaling(l, u, L, U)
end

ext/JuMPExt/wrapper.jl

Lines changed: 98 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
#=
wrapper.jl

=#

"""
    Optimizer{T} <: MOI.AbstractOptimizer

This struct is responsible for integrating AOCoptimizer with MathOptInterface,
which is how we can build a solver that can be used by JuMP models.

```julia
using JuMP
using AOCoptimizer

model = Model(AOCoptimizer.Optimizer)

@variable(model, x[1:5], Bin)
@variable(model, -2 <= y[1:5] <= 2)

@objective(model, Max, sum(x) - sum(y) + 2 * x'y)

optimize!(model)
```
"""
mutable struct Optimizer{T} <: MOI.AbstractOptimizer
    sense::MOI.OptimizationSense            # MIN_SENSE / MAX_SENSE / FEASIBILITY_SENSE

    quadratic::Matrix{T}                    # quadratic objective coefficients (empty when unset)
    linear::Union{Vector{T},Nothing}        # linear objective coefficients, or `nothing`
    offset::T                               # constant objective term
    continuous::Union{Vector{Bool},Nothing} # presumably a per-variable continuity mask — TODO confirm against optimize!

    moi_attributes::Dict{Symbol,Any}        # standard MOI attributes (name, silent, time limit)
    raw_attributes::Dict{String,Any}        # attributes set via raw string names
    aim_attributes::Dict{Symbol,Any}        # solver-specific attributes (e.g. :seed)

    variable_map::Dict{VI,Int}              # MOI variable index -> column position
    variable_info::VariableInfo{T}          # per-variable type and bounds

    fixed::Dict{VI,T}                       # variables fixed to a constant value

    output::Union{Dict{String,Any},Nothing} # solver results; `nothing` until a solve has run

    function Optimizer{T}() where {T}
        return new{T}(
            MOI.MIN_SENSE, # sense
            Matrix{T}(undef, 0, 0), # quadratic
            nothing, # linear
            zero(T), # offset
            nothing, # continuous
            Dict{Symbol,Any}( # moi - default
                :name => "",
                :silent => false,
                :time_limit_sec => nothing,
            ),
            Dict{String,Any}(),
            Dict{Symbol,Any}( # aim - default
                :seed => 0,
            ),
            Dict{VI,Int}(), # variable_map
            Dict{VI,Variable{T}}(), # variable_info
            Dict{VI,T}(), # fixed variables
            nothing, # output
        )
    end

    # Default numeric type is Float64.
    Optimizer() = Optimizer{Float64}()
end
"""
    MOI.empty!(optimizer::Optimizer{T}) where {T}

Reset the model state of `optimizer`: objective data, variable
bookkeeping, fixed variables and any cached solver output are discarded.
Returns the optimizer.

NOTE(review): the attribute dictionaries (`moi_attributes`,
`raw_attributes`, `aim_attributes`) are left untouched — presumably so
optimizer-level settings survive `empty!`; confirm this is intended.
"""
function MOI.empty!(optimizer::Optimizer{T}) where {T}
    optimizer.sense = MOI.MIN_SENSE
    optimizer.quadratic = Matrix{T}(undef, 0, 0)
    optimizer.linear = nothing
    optimizer.offset = zero(T)
    optimizer.continuous = nothing
    optimizer.output = nothing

    Base.empty!(optimizer.variable_map)
    Base.empty!(optimizer.variable_info)
    Base.empty!(optimizer.fixed)

    return optimizer
end
"""
    MOI.is_empty(optimizer::Optimizer{T}) where {T}

Return `true` iff `optimizer` is in the state produced by `MOI.empty!`:
minimization sense and no objective data, variables or fixed values.

The sense check matters: `MOI.empty!` resets `sense` to `MIN_SENSE`, so a
model whose only modification is switching to `MAX_SENSE` must not
report as empty.
"""
function MOI.is_empty(optimizer::Optimizer{T}) where {T}
    return optimizer.sense == MOI.MIN_SENSE &&
        isempty(optimizer.quadratic) &&
        isnothing(optimizer.linear) &&
        isnothing(optimizer.continuous) &&
        iszero(optimizer.offset) &&
        isempty(optimizer.variable_map) &&
        isempty(optimizer.variable_info) &&
        isempty(optimizer.fixed)
end
# Compact, human-readable tag shown for any Optimizer instance.
Base.show(io::IO, ::Optimizer) = print(io, "AOC Optimizer")

notebooks/Project.toml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,10 @@ Cthulhu = "f68482b8-f384-11e8-15f7-abe071a5a75f"
88
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
99
IJulia = "7073ff75-c697-5162-941a-fcdaad2a7d2a"
1010
JSON = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
11+
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
1112
KernelAbstractions = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
1213
Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
14+
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
1315
Quarto = "d7167be5-f61b-4dc9-b75c-ab62374668c5"
1416
Revise = "295af30f-e4ad-537b-8983-00126c2a3abe"
1517

notebooks/test-jump.jl

Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
#nb # %% A slide [markdown] {"slideshow": {"slide_type": "slide"}}
# # Using AOCoptimizer from JuMP

using Revise
using MathOptInterface
using JuMP
using AOCoptimizer

# Populates AOCoptimizer.MOI.Optimizer with the extension's Optimizer
# type; the JuMPExt extension is active because JuMP and
# MathOptInterface were imported above.
AOCoptimizer.init()

# `AOCoptimizer.MOI.Optimizer` is a Ref; `[]` dereferences it to the
# stored Optimizer type, which serves as the JuMP optimizer factory.
model = Model(AOCoptimizer.MOI.Optimizer[])
@variable(model, x, Bin)
@variable(model, y, Bin)
@variable(model, -1 <= z <= 1)
@objective(model, Min, x + y * z)

# The following will solve with default settings,
# using 60sec as timeout value.
optimize!(model)

src/AOCoptimizer.jl

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,4 +52,5 @@ include("precompile.jl")
5252

5353
include("init.jl")
5454

55+
5556
end # module

src/init.jl

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,13 +3,28 @@ init.jl
33
44
=#
55

#=
Submodule exposing the JuMP/MathOptInterface integration point.
`Optimizer` is populated lazily by `__init()` once the JuMPExt package
extension has been loaded (i.e. the user imported JuMP and
MathOptInterface).  NOTE(review): naming this submodule `MOI` shadows
the conventional alias for MathOptInterface inside this package —
consider renaming.
=#
module MOI

# Holds the JuMPExt `Optimizer` type once the extension is loaded;
# `nothing` until then.  A `const` binding to a `Ref` cell is required:
# the previous `Optimizer :: Ref{Any} = nothing` form attempts to
# convert `nothing` to `Ref{Any}` and errors when the module loads.
const Optimizer = Ref{Any}(nothing)

"""
    __init()

Look for the `JuMPExt` extension among the loaded modules and publish
its `Optimizer` type through the `Optimizer` ref.  A no-op when the
extension is not loaded.
"""
function __init()
    modules = Base.loaded_modules_array()
    index = findfirst(m -> nameof(m) == :JuMPExt, modules)
    if isnothing(index)
        # Guard: without this, `modules[nothing]` would throw whenever
        # the user has not loaded JuMP/MathOptInterface.
        @debug "JuMPExt extension not loaded; AOCoptimizer.MOI.Optimizer left unset."
        return
    end
    Optimizer[] = modules[index].Optimizer
    return
end

end
16+
function init()
    @debug "Initializing AOCoptimizer..."

    Solver.__register_non_linearities()
    Solver.__register_engines()
    Solver.__register_solvers()

    # Wire up the JuMP/MOI integration point.
    # NOTE(review): `MOI.__init()` looks up the JuMPExt extension among
    # the loaded modules — confirm it behaves sensibly when JuMP has not
    # been imported.
    MOI.__init()

    @debug "End of AOCoptimizer initialization."
    return
end
29+
30+

0 commit comments

Comments
 (0)