
Commit c0ba9bc

ported more code to connect to JuMP
1 parent: e9de08a

10 files changed, +638 −4 lines

.vscode/settings.json

Lines changed: 3 additions & 1 deletion
@@ -43,6 +43,7 @@
         "lenstr",
         "lualatex",
         "Lyutsarev",
+        "Maciel",
         "makedocs",
         "mathbb",
         "mathbf",
@@ -91,5 +92,6 @@
         "Yinyu",
         "Zentrum",
         "Zuse"
-    ]
+    ],
+    "julia.environmentPath": "e:\\Repos\\MSR\\OpticalCompute\\AOCoptimizer.jl\\notebooks"
 }

Project.toml

Lines changed: 5 additions & 1 deletion
@@ -1,6 +1,10 @@
 name = "AOCoptimizer"
 uuid = "ba4aa9bd-6938-48c2-966f-258481ba1c4a"
-authors = ["Kirill Kalinin <kkalinin@microsoft.com>", "Christos Gkantsidis <chrisgk@microsoft.com>"]
+authors = [
+    "Kirill Kalinin <kkalinin@microsoft.com>",
+    "Christos Gkantsidis <chrisgk@microsoft.com>",
+    "Pedro Maciel Xavier"
+]
 version = "0.2.1"
 
 [deps]

docs/src/manual/installation.md

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@ If the above does not work with the error
 please try the following:
 
 ```julia
-Pkg.dev(url="https://github.com/microsoft/AOCoptimizer.jl#main")
+Pkg.dev(url="https://github.com/microsoft/AOCoptimizer.jl")
 ```
 
 If you also want to use `CUDA` or `JuMP`,
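
A note on the fix above, based on general `Pkg` behavior rather than anything stated in these docs: `Pkg.dev` clones the repository and tracks the local checkout, so a branch is chosen with `git` inside the clone, not with a `#branch` fragment in the URL.

```julia
import Pkg

# Clone and track a local checkout (by default under ~/.julia/dev);
# if a non-default branch is needed, switch to it with git inside the clone.
Pkg.dev(url="https://github.com/microsoft/AOCoptimizer.jl")
```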

ext/JuMPExt/JuMPExt.jl

Lines changed: 11 additions & 0 deletions
@@ -1,11 +1,20 @@
 #=
 JuMPExt.jl
 
+Extensions that integrate the AOC optimizer with the JuMP framework.
+In reality, the extensions depend only on MathOptInterface, but they will mostly be
+used through JuMP.
 
+The code allows the user to solve "almost"-QUMO problems with the AOC optimizer.
+The extension transforms "almost"-QUMO problems to QUMO and then invokes the
+AOCoptimizer solver on the result. Below we implement very simple transformations.
+
+The code below is adapted from the work of Pedro Maciel Xavier.
 =#
 
 module JuMPExt
 
+using LinearAlgebra
 import MathOptInterface as MOI
 import MathOptInterface: is_empty, empty!, optimize!
 import AOCoptimizer as AOC
@@ -23,6 +32,8 @@ const SAF{T} = MOI.ScalarAffineFunction{T}
 const SQT{T} = MOI.ScalarQuadraticTerm{T}
 const SQF{T} = MOI.ScalarQuadraticFunction{T}
 
+const Engine = AOC.Solver.Engine
+
 include("variables.jl")
 include("wrapper.jl")
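
For orientation, here is a minimal sketch of how the extension is meant to be driven from JuMP. It assumes the wrapper in wrapper.jl (not shown in this commit) is exposed as `AOCoptimizer.Optimizer`; that entry point is an assumption, not something this diff confirms.

```julia
using JuMP
import AOCoptimizer

# Hypothetical entry point; the actual constructor is defined in wrapper.jl.
model = Model(AOCoptimizer.Optimizer)

@variable(model, x[1:3], Bin)          # binary variables (MOI.ZeroOne)
@variable(model, 0.0 <= y <= 1.0)      # continuous variables need both bounds
@objective(model, Min, x[1] * x[2] - 2 * x[3] + y)  # "almost"-QUMO objective

set_time_limit_sec(model, 5.0)         # MOI.TimeLimitSec
optimize!(model)

value.(x), value(y), objective_value(model)
```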

ext/JuMPExt/attributes.jl

Lines changed: 164 additions & 0 deletions
@@ -0,0 +1,164 @@
+#=
+attributes.jl
+
+Specifies the attributes supported by the optimizer.
+Handles setting and retrieving those attributes.
+=#
+
+#=
+Custom attributes specific to the optimizer
+=#
+
+"""Root type for all AOC attributes"""
+abstract type AOCAttribute <: MOI.AbstractOptimizerAttribute end
+
+"""Specifies the random seed to use"""
+struct Seed <: AOCAttribute end
+
+"""Specifies the working directory where the optimizer should store
+temporary files and updates (where applicable)"""
+struct WorkDir <: AOCAttribute end
+
+"""Specifies the numeric type to use (Float64, Float32, Float16, BFloat16)
+when running locally (if applicable);
+Float64 may not be available when running on the GPU"""
+struct NumericType <: AOCAttribute end
+
+"""Specifies the numeric type to use (Float64, Float32, Float16, BFloat16)
+when running remotely;
+Float64 may not be available when running on the GPU"""
+struct Precision <: AOCAttribute end
+
+"""Specifies the backend to use.
+By default it is the online service (Service);
+other backends are available (e.g., [`RandomAssignment`](@ref) for quick testing)"""
+struct Backend <: AOCAttribute end
+
+
+const AOC_RAW_ATTRIBUTES = Dict{String,Any}(
+    "seed" => Seed(),
+    "work_dir" => WorkDir(),
+    "numeric_type" => NumericType(),
+    "backend" => Backend(),
+)
+
+MOI.supports(::Optimizer, ::AOCAttribute) = true
+
+MOI.get(optimizer::Optimizer, ::Seed) = get(optimizer.aim_attributes, :seed, nothing)
+function MOI.set(optimizer::Optimizer, ::Seed, value::Integer)
+    optimizer.aim_attributes[:seed] = value
+    return nothing
+end
+function MOI.set(optimizer::Optimizer, ::Seed, ::Nothing)
+    delete!(optimizer.aim_attributes, :seed)
+    return nothing
+end
+
+MOI.get(optimizer::Optimizer, ::WorkDir) = get(optimizer.aim_attributes, :work_dir, nothing)
+function MOI.set(optimizer::Optimizer, ::WorkDir, value::AbstractString)
+    optimizer.aim_attributes[:work_dir] = String(value)
+    return nothing
+end
+function MOI.set(optimizer::Optimizer, ::WorkDir, ::Nothing)
+    delete!(optimizer.aim_attributes, :work_dir)
+    return nothing
+end
+
+MOI.get(optimizer::Optimizer{T}, ::NumericType) where {T<:Real} =
+    get(optimizer.aim_attributes, :numeric_type, T)
+
+function MOI.set(optimizer::Optimizer, ::NumericType, ::Type{T}) where {T<:Real}
+    optimizer.aim_attributes[:numeric_type] = T
+    return nothing
+end
+
+MOI.get(optimizer::Optimizer, ::Precision) = get(optimizer.aim_attributes, :precision, "Float32")
+function MOI.set(optimizer::Optimizer, ::Precision, value::String)
+    @assert value in ("BFloat16", "Float16", "Float32", "Float64")
+    optimizer.aim_attributes[:precision] = value
+    return nothing
+end
+
+MOI.get(optimizer::Optimizer, ::Backend) = get(optimizer.aim_attributes, :backend, AOC.Solver.best_engine())
+function MOI.set(optimizer::Optimizer, ::Backend, value::B) where {B<:Engine}
+    optimizer.aim_attributes[:backend] = value
+    return nothing
+end
+
+
+#=
+Attributes to interface with the MOI backend
+=#
+
+MOI.get(::Optimizer, ::MOI.SolverName) = "AOC Optimizer"
+
+# TODO: This should be AOC's version instead!
+MOI.get(::Optimizer, ::MOI.SolverVersion) = AOC.__VERSION__
+
+MOI.get(optimizer::Optimizer, ::MOI.RawSolver) = optimizer
+
+MOI.supports(::Optimizer, attr::MOI.RawOptimizerAttribute) = true # haskey(AOC_RAW_ATTRIBUTES, attr.name)
+function MOI.get(optimizer::Optimizer, attr::MOI.RawOptimizerAttribute)
+    if haskey(AOC_RAW_ATTRIBUTES, attr.name)
+        return MOI.get(optimizer, AOC_RAW_ATTRIBUTES[attr.name])
+    else
+        return optimizer.raw_attributes[attr.name]
+    end
+end
+function MOI.set(optimizer::Optimizer, attr::MOI.RawOptimizerAttribute, value::Any)
+    if haskey(AOC_RAW_ATTRIBUTES, attr.name)
+        MOI.set(optimizer, AOC_RAW_ATTRIBUTES[attr.name], value)
+    else
+        optimizer.raw_attributes[attr.name] = value
+    end
+
+    return nothing
+end
+
+MOI.supports(::Optimizer, ::MOI.Name) = true
+MOI.get(optimizer::Optimizer, ::MOI.Name) = get(optimizer.moi_attributes, :name, "")
+function MOI.set(optimizer::Optimizer, ::MOI.Name, value::AbstractString)
+    optimizer.moi_attributes[:name] = String(value)
+    return nothing
+end
+
+MOI.supports(::Optimizer, ::MOI.Silent) = true
+MOI.get(optimizer::Optimizer, ::MOI.Silent) = get(optimizer.moi_attributes, :silent, false)
+function MOI.set(optimizer::Optimizer, ::MOI.Silent, value::Bool)
+    optimizer.moi_attributes[:silent] = value
+    return nothing
+end
+
+MOI.supports(::Optimizer, ::MOI.TimeLimitSec) = true
+MOI.get(optimizer::Optimizer, ::MOI.TimeLimitSec) = get(optimizer.moi_attributes, :time_limit_sec, nothing)
+function MOI.set(optimizer::Optimizer, ::MOI.TimeLimitSec, value::Real)
+    @assert value >= 0
+    optimizer.moi_attributes[:time_limit_sec] = Float64(value)
+    return nothing
+end
+function MOI.set(optimizer::Optimizer, ::MOI.TimeLimitSec, ::Nothing)
+    delete!(optimizer.moi_attributes, :time_limit_sec)
+    return nothing
+end
+
+MOI.supports(::Optimizer, ::MOI.NumberOfThreads) = true
+MOI.get(optimizer::Optimizer, ::MOI.NumberOfThreads) = get(optimizer.moi_attributes, :number_of_threads, 2)
+
+function MOI.set(optimizer::Optimizer, ::MOI.NumberOfThreads, value::Integer)
+    @assert value >= 1
+
+    optimizer.moi_attributes[:number_of_threads] = value
+    return nothing
+end
+
+
+#=
+Unsupported attributes
+=#
+
+MOI.supports(::Optimizer, ::MOI.ObjectiveLimit) = false
+MOI.supports(::Optimizer, ::MOI.SolutionLimit) = false
+MOI.supports(::Optimizer, ::MOI.AbsoluteGapTolerance) = false
+MOI.supports(::Optimizer, ::MOI.RelativeGapTolerance) = false
+MOI.supports(::Optimizer, ::MOI.AbstractModelAttribute) = false
+MOI.supports(::Optimizer, ::MOI.AbstractOptimizerAttribute) = false
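
To make the routing above concrete, here is a sketch of setting attributes at the MOI level. `opt` stands for any instance of the extension's `Optimizer`; its constructor lives in wrapper.jl, which this commit does not show.

```julia
import MathOptInterface as MOI

# Sketch: raw attribute names found in AOC_RAW_ATTRIBUTES are routed to the
# typed attributes above; unknown names fall through to `raw_attributes`.
function configure!(opt)
    MOI.set(opt, MOI.RawOptimizerAttribute("seed"), 1234)             # routed to Seed()
    MOI.set(opt, MOI.RawOptimizerAttribute("numeric_type"), Float32)  # routed to NumericType()
    MOI.set(opt, MOI.Silent(), true)        # stored under :silent
    MOI.set(opt, MOI.TimeLimitSec(), 30.0)  # stored under :time_limit_sec
    return opt
end
```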

ext/JuMPExt/constraints.jl

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
+#=
+constraints.jl
+
+Specifies the constraints supported by the optimizer.
+At this low level, we only support the following constraints:
+- Binary variables
+- Continuous variables with lower and upper bounds.
+Observe that both bounds need to be specified (i.e., no unbounded variables).
+=#
+
+"""Optimizer supports binary variables"""
+function MOI.supports_constraint(
+    ::Optimizer,
+    ::Type{VI},
+    ::Type{MOI.ZeroOne},
+)
+    return true
+end
+
+"""Optimizer supports continuous variables with lower and upper bounds"""
+function MOI.supports_constraint(
+    ::Optimizer{T},
+    ::Type{VI},
+    ::Type{S},
+) where {T,S<:Union{EQ{T},LT{T},GT{T},MOI.Interval{T}}}
+    return true
+end
+
+"""Optimizer does not support generic constraints"""
+function MOI.supports_constraint(
+    ::Optimizer,
+    ::Type{F},
+    ::Type{S},
+) where {F<:MOI.AbstractFunction,S<:MOI.AbstractSet}
+    return false
+end
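
A short JuMP-level illustration of what these checks admit, again assuming the hypothetical `AOCoptimizer.Optimizer` entry point from wrapper.jl:

```julia
using JuMP
import AOCoptimizer

model = Model(AOCoptimizer.Optimizer)  # hypothetical entry point
@variable(model, b, Bin)               # VariableIndex-in-ZeroOne: supported
@variable(model, -1.0 <= z <= 1.0)     # VariableIndex-in-Interval: supported
@variable(model, w == 0.5)             # VariableIndex-in-EqualTo: supported
# @constraint(model, b + z <= 1)       # any generic function-in-set pair is rejected
```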

ext/JuMPExt/objective.jl

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
+#=
+objective.jl
+
+Specifies the objective functions supported by the optimizer.
+The objective function can be either an affine or a quadratic function.
+The optimization goal is either to minimize or maximize the objective function.
+=#
+
+MOI.get(optimizer::Optimizer, ::MOI.ObjectiveSense) = optimizer.sense
+
+function MOI.set(optimizer::Optimizer, ::MOI.ObjectiveSense, value::MOI.OptimizationSense)
+    @assert value in (MOI.MAX_SENSE, MOI.MIN_SENSE)
+
+    optimizer.sense = value
+    return nothing
+end
+
+"""Optimizer can be configured with an optimization sense"""
+MOI.supports(::Optimizer, ::MOI.ObjectiveSense) = true
+"""Optimizer supports optimization over a single variable"""
+MOI.supports(::Optimizer, ::MOI.ObjectiveFunction{VI}) = true
+"""Optimizer supports optimization of linear and quadratic functions"""
+MOI.supports(::Optimizer, ::MOI.ObjectiveFunction{F}) where {T,F<:Union{SAF{T},SQF{T}}} = true
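
For reference, the quadratic objectives declared as supported here arrive as MOI `ScalarQuadraticFunction`s. A minimal construction of f = 2·x1·x2 + x1 + 1, with standalone variable indices for illustration only:

```julia
import MathOptInterface as MOI

x1, x2 = MOI.VariableIndex(1), MOI.VariableIndex(2)
f = MOI.ScalarQuadraticFunction(
    [MOI.ScalarQuadraticTerm(2.0, x1, x2)],  # quadratic term 2 * x1 * x2
    [MOI.ScalarAffineTerm(1.0, x1)],         # affine term x1
    1.0,                                     # constant offset
)
```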

ext/JuMPExt/solutions.jl

Lines changed: 62 additions & 0 deletions
@@ -0,0 +1,62 @@
+function MOI.get(optimizer::Optimizer, pm::MOI.PrimalStatus)
+    if 1 <= pm.result_index <= MOI.get(optimizer, MOI.ResultCount())
+        # Unconstrained problems are always feasible :)
+        return MOI.FEASIBLE_POINT
+    else
+        return MOI.NO_SOLUTION
+    end
+end
+
+function MOI.get(::Optimizer, ::MOI.DualStatus)
+    # No constraints, no duals :(
+    return MOI.NO_SOLUTION
+end
+
+function MOI.get(::Optimizer, ::MOI.RawStatusString)
+    return ""
+end
+
+function MOI.get(optimizer::Optimizer, ::MOI.ResultCount)
+    if isnothing(optimizer.output)
+        return 0
+    else
+        return 1
+    end
+end
+
+function MOI.get(optimizer::Optimizer, ::MOI.TerminationStatus)
+    if isnothing(optimizer.output)
+        return MOI.OPTIMIZE_NOT_CALLED
+    else
+        # This is everything we can say about the termination status,
+        # as with every other heuristic optimization method.
+        return MOI.LOCALLY_SOLVED
+    end
+end
+
+MOI.supports(::Optimizer, ::MOI.VariablePrimal, ::VI) = true
+function MOI.get(optimizer::Optimizer, vp::MOI.VariablePrimal, vi::VI)
+    @assert 1 <= vp.result_index <= MOI.get(optimizer, MOI.ResultCount())
+
+    if haskey(optimizer.fixed, vi)
+        return optimizer.fixed[vi]
+    end
+
+    yi = optimizer.output["Assignment"][optimizer.variable_map[vi]]
+
+    if optimizer.variable_info[vi].type === :binary
+        return yi
+    else # optimizer.variable_info[vi].type === :continuous
+        li = optimizer.variable_info[vi].lower
+        ui = optimizer.variable_info[vi].upper
+
+        # Map the solver's internal range [-1, 1] onto [li, ui]
+        return li + (yi + 1) * (ui - li) / 2
+    end
+end
+
+MOI.supports(::Optimizer, ::MOI.ObjectiveValue) = true
+function MOI.get(optimizer::Optimizer, ov::MOI.ObjectiveValue)
+    @assert 1 <= ov.result_index <= MOI.get(optimizer, MOI.ResultCount())
+
+    return optimizer.output["Objective"] + optimizer.offset
+end
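
The continuous branch of `VariablePrimal` suggests the solver reports assignments yi in [-1, 1], which the getter maps affinely onto the variable's box [li, ui]. A quick endpoint check of that formula:

```julia
# Same affine map as in the getter above; a sketch for checking the endpoints.
rescale(yi, li, ui) = li + (yi + 1) * (ui - li) / 2

rescale(-1.0, 2.0, 5.0)  # 2.0 -> lower bound
rescale( 1.0, 2.0, 5.0)  # 5.0 -> upper bound
rescale( 0.0, 2.0, 5.0)  # 3.5 -> midpoint
```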
