
Update jump-api (PR #294)


Open: wants to merge 19 commits into base jg/jumpapi.
1 change: 1 addition & 0 deletions docs/Project.toml
@@ -9,6 +9,7 @@
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
Literate = "98b081ad-f1c9-55d3-8b20-4c87d4299306"
MLDatasets = "eb30cadb-4394-5ae3-aed4-317e484a6458"
MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
Pkg = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
Plots = "91a5bcdd-55d7-5caf-9e0b-520d859cae80"
SCS = "c946c3f1-0d1f-5ce8-9dea-7daa1f7e2d13"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
6 changes: 6 additions & 0 deletions docs/make.jl
@@ -3,6 +3,12 @@
# Use of this source code is governed by an MIT-style license that can be found
# in the LICENSE.md file or at https://opensource.org/licenses/MIT.

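# NOTE: this pins ParametricOptInterface to a development branch, presumably
# until a release containing the changes this PR relies on is available.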
import Pkg
Pkg.add(;
url = "https://github.com/jump-dev/ParametricOptInterface.jl",
rev = "jg/newdo",
)

using Documenter
using DiffOpt
using Literate
35 changes: 24 additions & 11 deletions src/ConicProgram/ConicProgram.jl
@@ -121,7 +121,7 @@
model.back_grad_cache = nothing
empty!(model.input_cache)
empty!(model.x)
- empty!(model.s)
+ empty!(model.s) # TODO: stop using this
empty!(model.y)
model.diff_time = NaN
return
@@ -141,6 +141,14 @@
return MOI.supports_constraint(model.model, F, S)
end

function MOI.supports_constraint(
    ::Model,
    ::Type{MOI.VectorAffineFunction{T}},
    ::Type{MOI.PositiveSemidefiniteConeSquare},
) where {T}
    return false
end

function MOI.set(
model::Model,
::MOI.ConstraintPrimalStart,
@@ -189,6 +197,7 @@
)
end

# TODO: remove this
if any(isnan, model.s) || length(model.s) < length(b)
error(
"Some constraints are missing a value for the `ConstraintPrimalStart` attribute.",
@@ -216,10 +225,12 @@
m = A.m
n = A.n
N = m + n + 1

+ slack = b - A * model.x
# NOTE: w = 1.0 systematically since we asserted the primal-dual pair is optimal
# `inv(M)((x, y, 1), (0, s, 0)) = (x, y, 1) - (0, s, 0)`,
# see Minty parametrization in https://stanford.edu/~boyd/papers/pdf/cone_prog_refine.pdf
- (u, v, w) = (model.x, model.y - model.s, 1.0)
+ (u, v, w) = (model.x, model.y - slack, 1.0)

# find gradient of projections on dual of the cones
Dπv = DiffOpt.Dπ(v, model.model, model.model.constraints.sets)
@@ -260,12 +271,13 @@
M = gradient_cache.M
vp = gradient_cache.vp
Dπv = gradient_cache.Dπv
- x = model.x
- y = model.y
- s = model.s
A = gradient_cache.A
b = gradient_cache.b
c = gradient_cache.c
+ x = model.x
+ y = model.y
+ # s = model.s
+ slack = b - A * x

objective_function = DiffOpt._convert(
MOI.ScalarAffineFunction{Float64},
@@ -309,7 +321,7 @@
n = size(A, 2)
N = m + n + 1
# NOTE: w = 1 systematically since we asserted the primal-dual pair is optimal
- (u, v, w) = (x, y - s, 1.0)
+ (u, v, w) = (x, y - slack, 1.0)

# g = dQ * Π(z/|w|) = dQ * [u, vp, 1.0]
RHS = [
@@ -340,12 +352,13 @@
M = gradient_cache.M
vp = gradient_cache.vp
Dπv = gradient_cache.Dπv
- x = model.x
- y = model.y
- s = model.s
A = gradient_cache.A
b = gradient_cache.b
c = gradient_cache.c
+ x = model.x
+ y = model.y
+ # s = model.s
+ slack = b - A * x

dx = zeros(length(c))
for (vi, value) in model.input_cache.dx
@@ -358,13 +371,13 @@
n = size(A, 2)
N = m + n + 1
# NOTE: w = 1 systematically since we asserted the primal-dual pair is optimal
- (u, v, w) = (x, y - s, 1.0)
+ (u, v, w) = (x, y - slack, 1.0)

# dz = D \phi (z)^T (dx,dy,dz)
dz = [
dx
Dπv' * (dy + ds) - ds
- -x' * dx - y' * dy - s' * ds
+ -x' * dx - y' * dy - slack' * ds
]

g = if LinearAlgebra.norm(dz) <= 1e-4 # TODO: parametrize or remove
18 changes: 12 additions & 6 deletions src/NonLinearProgram/NonLinearProgram.jl
@@ -119,11 +119,10 @@ function MOI.supports_constraint(
return true
end

- function MOI.supports_constraint(
+ function MOI.supports_add_constrained_variable(
::Form,
- ::Type{MOI.VariableIndex},
- ::Type{MOI.Parameter{Float64}},
- )
+ ::Type{MOI.Parameter{T}},
+ ) where {T}
return true
end
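# A hedged usage sketch (not part of this diff): with
# `supports_add_constrained_variable`, a parameter is declared together with
# its variable in a single call, rather than via `MOI.add_constraint`:
#
#     p, ci = MOI.add_constrained_variable(model, MOI.Parameter(2.5))
#
# `ci` is the `MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{Float64}}`
# that identifies the parameter when setting and querying sensitivities.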

@@ -300,6 +299,13 @@ function Model()
)
end

function MOI.supports_add_constrained_variable(
::Model,
::Type{MOI.Parameter{T}},
) where {T}
return true
end

_objective_sense(form::Form) = form.sense
_objective_sense(model::Model) = _objective_sense(model.model)

@@ -513,8 +519,8 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6)
Δp = zeros(length(cache.params))
for (i, var_idx) in enumerate(cache.params)
ky = form.var2ci[var_idx]
- if haskey(model.input_cache.dp, ky) # only for set sensitivities
- Δp[i] = model.input_cache.dp[ky]
+ if haskey(model.input_cache.parameter_constraints, ky) # only for set sensitivities
+ Δp[i] = model.input_cache.parameter_constraints[ky]
end
end

3 changes: 3 additions & 0 deletions src/copy_dual.jl
@@ -87,6 +87,9 @@ function _copy_dual(dest::MOI.ModelLike, src::MOI.ModelLike, index_map)
MOI.get(src, MOI.VariablePrimal(), vis_src),
)
for (F, S) in MOI.get(dest, MOI.ListOfConstraintTypesPresent())
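# Do not copy primal/dual starts for parameter constraints.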
if F <: MOI.VariableIndex && S <: MOI.Parameter
continue
end
_copy_constraint_start(
dest,
src,
63 changes: 57 additions & 6 deletions src/diff_opt.jl
@@ -13,7 +13,6 @@

Base.@kwdef mutable struct InputCache
dx::Dict{MOI.VariableIndex,Float64} = Dict{MOI.VariableIndex,Float64}()# dz for QP
- dp::Dict{MOI.ConstraintIndex,Float64} = Dict{MOI.ConstraintIndex,Float64}() # Specifically for NonLinearProgram
dy::Dict{MOI.ConstraintIndex,Float64} = Dict{MOI.ConstraintIndex,Float64}()
# Dual sensitivity currently only works for NonLinearProgram
# ds
@@ -23,6 +22,8 @@
# concrete value types.
# `scalar_constraints` and `vector_constraints` includes `A` and `b` for CPs
# or `G` and `h` for QPs
+ parameter_constraints::Dict{MOI.ConstraintIndex,Float64} =
+ Dict{MOI.ConstraintIndex,Float64}() # Specifically for NonLinearProgram
scalar_constraints::MOIDD.DoubleDict{MOI.ScalarAffineFunction{Float64}} =
MOIDD.DoubleDict{MOI.ScalarAffineFunction{Float64}}() # also includes G for QPs
vector_constraints::MOIDD.DoubleDict{MOI.VectorAffineFunction{Float64}} =
@@ -33,8 +34,8 @@

function Base.empty!(cache::InputCache)
empty!(cache.dx)
- empty!(cache.dp)
empty!(cache.dy)
+ empty!(cache.parameter_constraints)
empty!(cache.scalar_constraints)
empty!(cache.vector_constraints)
cache.objective = nothing
@@ -136,6 +137,16 @@
"""
struct ForwardConstraintFunction <: MOI.AbstractConstraintAttribute end

"""
ForwardConstraintSet <: MOI.AbstractConstraintAttribute

A `MOI.AbstractConstraintAttribute` to set input data to forward differentiation, that
is, problem input data.

Currently, this only works for the set `MOI.Parameter`.
"""
struct ForwardConstraintSet <: MOI.AbstractConstraintAttribute end
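# A hedged usage sketch (not part of this diff): given `ci`, the index of a
# `MOI.VariableIndex`-in-`MOI.Parameter` constraint, a forward perturbation of
# the parameter value can be supplied before differentiating:
#
#     MOI.set(model, DiffOpt.ForwardConstraintSet(), ci, MOI.Parameter(1.0))
#     DiffOpt.forward_differentiate!(model)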

"""
ForwardVariablePrimal <: MOI.AbstractVariableAttribute

@@ -167,10 +178,6 @@
"""
struct ReverseVariablePrimal <: MOI.AbstractVariableAttribute end

- struct ForwardConstraintSet <: MOI.AbstractConstraintAttribute end
-
- struct ReverseConstraintSet <: MOI.AbstractConstraintAttribute end

"""
ReverseConstraintDual <: MOI.AbstractConstraintAttribute

@@ -253,6 +260,18 @@

MOI.is_set_by_optimize(::ReverseConstraintFunction) = true

"""
ReverseConstraintSet <: MOI.AbstractConstraintAttribute

An `MOI.AbstractConstraintAttribute` to get output data to reverse differentiation, that
is, problem input data.

Currently, this only works for the set `MOI.Parameter`.
"""
struct ReverseConstraintSet <: MOI.AbstractConstraintAttribute end

MOI.is_set_by_optimize(::ReverseConstraintSet) = true
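# A hedged usage sketch (not part of this diff): after seeding, e.g.,
# `DiffOpt.ReverseVariablePrimal` and calling
# `DiffOpt.reverse_differentiate!(model)`, the sensitivity with respect to the
# parameter behind `ci` would be queried as (assumed getter):
#
#     MOI.get(model, DiffOpt.ReverseConstraintSet(), ci)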

"""
DifferentiateTimeSec()

@@ -273,6 +292,11 @@
"""
abstract type AbstractModel <: MOI.ModelLike end

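"""
    empty_input_sensitivities!(model::AbstractModel)

Empty `model.input_cache`, discarding any forward or reverse sensitivity
inputs that were previously set.
"""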
function empty_input_sensitivities!(model::AbstractModel)
empty!(model.input_cache)
return
end

MOI.supports_incremental_interface(::AbstractModel) = true

function MOI.is_valid(model::AbstractModel, idx::MOI.Index)
Expand All @@ -287,6 +311,8 @@
return MOI.add_variables(model.model, n)
end

# TODO: add support for add_constrained_variable(s) and supports_

function MOI.Utilities.pass_nonvariable_constraints(
dest::AbstractModel,
src::MOI.ModelLike,
@@ -409,6 +435,11 @@
ci::MOI.ConstraintIndex{MOI.ScalarAffineFunction{T},S},
func::MOI.ScalarAffineFunction{T},
) where {T,S}
    if MOI.supports_add_constrained_variable(model.model, MOI.Parameter{T})
        error(
            "The model with type $(typeof(model)) supports Parameters, so setting ForwardConstraintFunction is not allowed.",
        )
    end
model.input_cache.scalar_constraints[ci] = func
return
end
@@ -419,10 +450,30 @@
ci::MOI.ConstraintIndex{MOI.VectorAffineFunction{T},S},
func::MOI.VectorAffineFunction{T},
) where {T,S}
    if MOI.supports_add_constrained_variable(model.model, MOI.Parameter{T})
        error(
            "The model with type $(typeof(model)) supports Parameters, so setting ForwardConstraintFunction is not allowed.",
        )
    end
model.input_cache.vector_constraints[ci] = func
return
end

function MOI.set(
model::AbstractModel,
::ForwardConstraintSet,
ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}},
set::MOI.Parameter{T},
) where {T}
    if !MOI.supports_add_constrained_variable(model.model, MOI.Parameter{T})
        error(
            "The model with type $(typeof(model)) does not support Parameters",
        )
    end
model.input_cache.parameter_constraints[ci] = set.value
return
end

function lazy_combination(op::F, α, a, β, b) where {F<:Function}
return LazyArrays.ApplyArray(
op,