complements, EMP and misc improvements
xhub committed Mar 14, 2022
1 parent 7f7f816 commit 73864ba
Showing 20 changed files with 565 additions and 201 deletions.
8 changes: 5 additions & 3 deletions Project.toml
@@ -1,13 +1,14 @@
name = "ReSHOP"
uuid = "97740bd8-9665-555f-bcf8-31e31065c979"
license = "MIT"
desc = "Reformulation Solver for Hierachical Optimization Problem"
desc = "Reformulation Solver for Hierarchical Optimization Problems"
repo = "https://github.com/xhub/ReSHOP.jl.git"
authors = ["Olivier Huber <oli.huber@gmail.com>"]
version = "0.1.2"
version = "0.1.3"

[deps]
Compat = "34da2185-b29b-5c13-b0c7-acf172513d20"
DataDeps = "124859b0-ceae-595e-8997-d05f6a7a8dfe"
JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
Libdl = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
@@ -22,7 +23,8 @@ Compat = "1, 2, 3"
MathOptInterface = "^0.9"
MathProgBase = "^0.7.7"
JuMP = "0.18, 0.19, 0.20, 0.21"
ReSHOP_jll = "= 0.1.1"
DataDeps = "0.7"
ReSHOP_jll = "= 0.1.2"

[extras]
MINLPTests = "ee0a3090-8ee9-5cdb-b8cb-8eeba3165522"
4 changes: 2 additions & 2 deletions src/MBP_wrapper/utils.jl
@@ -237,14 +237,14 @@ function reshop_add_contraint_sense(ctx, m::ReSHOPMathProgBaseModel, offset)
end
eidx = m.nonquad_idx[idx] + offset - 1 + m.offset
CONFIG[:debug] && println("Setting sense and rhs for equation $eidx: $rel $value")
reshop_set_rhs(ctx, eidx, value)
reshop_set_cst(ctx, eidx, -value)
reshop_set_equtype(ctx, eidx, rel)
end

for (idx, equ) in enumerate(m.quad_equs)
rel, value = equ[end-1:end]
eidx = m.quad_idx[idx] + offset - 1 + m.offset
reshop_set_rhs(ctx, eidx, value)
reshop_set_cst(ctx, eidx, -value)
reshop_set_equtype(ctx, eidx, relation_to_reshop[rel])
end
end
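Note: the change from reshop_set_rhs to reshop_set_cst (mirrored below in the MOI wrapper, the NLP loader, and the result getters) switches the stored quantity from the right-hand side b of f(x) REL b to the constant c of the equivalent form f(x) + c REL 0, hence the sign flip c = -b. A minimal sketch of the equivalence in Julia, with made-up numbers:

    # Both conventions encode the same constraint f(x) <= b
    f(x) = 2x          # toy function
    b = 4.0            # right-hand side (old convention)
    c = -b             # equation constant (new convention)
    x = 1.0
    @assert (f(x) <= b) == (f(x) + c <= 0.0)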
36 changes: 26 additions & 10 deletions src/MOI_wrapper.jl
@@ -60,8 +60,8 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
sense::MOI.OptimizationSense
status::Cint

vov_mapping::Dict{MOI.ConstraintIndex{VOV, <: Union{VLS, MOI.SOS1{Float64}, MOI.SOS2{Float64}}}, Vector{Cuint}}
vaf_mapping::Dict{MOI.ConstraintIndex{VAF, <: VLS}, Vector{Cuint}}
vov_mapping::Dict{MOI.ConstraintIndex{VOV, <: Union{VLS, MOI.SOS1{Float64}, MOI.SOS2{Float64}, MOI.Complements}}, Vector{Cuint}}
vaf_mapping::Dict{MOI.ConstraintIndex{VAF, <: Union{VLS, MOI.Complements}}, Vector{Cuint}}
quadfn_mapping::Dict{MOI.ConstraintIndex{MOI.ScalarQuadraticFunction{Float64}, <: SS}, Cuint}
sos_sets::Dict{MOI.ConstraintIndex{VOV, <: Union{MOI.SOS1{Float64}, MOI.SOS2{Float64}}}, Union{MOI.SOS1{Float64}, MOI.SOS2{Float64}}}
fake_cons_name::Dict{MOI.ConstraintIndex, String}
@@ -76,25 +76,30 @@ mutable struct Optimizer <: MOI.AbstractOptimizer
gams_dir::String
avar_cache::Union{Ptr{abstract_var}, Nothing}
solver_name::String
solver_stack::String
start_nl_cons::Int
len_nl_cons::Cuint
end

function helper_options(ctx, options, reshop_opts::Ptr{reshop_options})
solver_name = ""
solver_stack = ""
rhp_options = Dict{String, Union{Cdouble, Cint, Bool, Cstring, String}}()
for (name, value) in options
if string(name) == "solver"
sname = string(name)
if sname == "solver"
solver_name = value
elseif sname == "solver_stack"
solver_stack = value
else
res = reshop_option_set(reshop_opts, string(name), value)
res = reshop_option_set(reshop_opts, sname, value)
if (res != 0)
rhp_set_option(ctx, string(name), value)
rhp_set_option(ctx, sname, value)
end
rhp_options[string(name)] = value
rhp_options[sname] = value
end
end
return (solver_name, rhp_options)
return (solver_name, solver_stack, rhp_options)
end
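For orientation, the two option names recognized here are "solver" (the subsolver) and "solver_stack" (which stack hosts it, dispatched on in MOI.optimize! below). A hypothetical JuMP usage sketch: the option names follow this diff, but the concrete values ("path", "GAMS") and the anonymous-constructor form are assumptions, not something this commit pins down:

    using JuMP, ReSHOP

    # Pick a subsolver and the stack that hosts it (values are illustrative)
    model = Model(() -> ReSHOP.Optimizer(solver = "path", solver_stack = "GAMS"))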

function Optimizer(;options...)
@@ -105,15 +110,15 @@ function Optimizer(;options...)
# TODO this is quite a hack just for the "output" option.
# Refactoring option in ReSHOP will enable us to move on
reshop_opts = reshop_options_alloc()
solver_name, rhp_options = helper_options(ctx, options, reshop_opts)
solver_name, solver_stack, rhp_options = helper_options(ctx, options, reshop_opts)

model = Optimizer(0, MOI.FEASIBILITY_SENSE, 0,
Dict{MOI.ConstraintIndex{VOV, <: VLS}, Cuint}(), Dict{MOI.ConstraintIndex{VAF, <: VLS}, Cuint}(),
Dict{MOI.ConstraintIndex{MOI.ScalarQuadraticFunction{Float64}, <: SS}, Cuint}(),
Dict{MOI.ConstraintIndex{VOV, <: Union{MOI.SOS1{Float64}, MOI.SOS2{Float64}}}, Union{MOI.SOS1{Float64}, MOI.SOS2{Float64}}}(),
Dict{MOI.ConstraintIndex,String}(), Set{MOI.VariableIndex}(),
ctx, Ptr{context}(C_NULL), Ptr{reshop_model}(C_NULL), Ptr{reshop_model}(C_NULL),
reshop_opts, rhp_options, "", nothing, solver_name, -1, 0)
reshop_opts, rhp_options, "", nothing, solver_name, solver_stack, -1, 0)

finalizer(MOI.empty!, model)
return model
@@ -203,12 +208,22 @@ function MOI.optimize!(model::Optimizer)
end
end
# TODO check if gams_dir and ctx_dest already exists, do not reallocate then.
model.ctx_dest, model.gams_dir = reshop_setup_gams()

solver_stack = get_solverstack(model)

if solver_stack == "GAMS"
model.ctx_dest, model.gams_dir = reshop_setup_gams()
elseif solver_stack == "RESHOP"
model.ctx_dest = reshop_setup_ownsolver()
else
error("Unsupported solver stack $solver_stack")
end

# Calling from emp, we already have a mdl object
if model.mdl == C_NULL
model.mdl = reshop_alloc(model.ctx)
end

model.mdl_solver = reshop_alloc(model.ctx_dest)
model.status = reshop_solve(model.mdl, model.mdl_solver, model.ctx_dest, model.solver_name)
reshop_postprocess(model.mdl_solver)
@@ -223,6 +238,7 @@ function MOI.empty!(model::Optimizer)
model.mdl = C_NULL
reshop_free(model.mdl_solver)
model.mdl_solver = C_NULL

if model.ctx != nothing
ctx_dealloc(model.ctx)
model.ctx = ctx_alloc()
98 changes: 94 additions & 4 deletions src/MOI_wrapper/constraints.jl
@@ -102,7 +102,7 @@ function MOI.add_constraint(model::Optimizer,
avar = _ensure_avar(model)
rhp_avar_set(avar, vidx)
rhp_equ_add_linear(model.ctx, eidx, avar, coefs)
_set_rhs(model, eidx, func.constant, set)
_set_cst(model, eidx, func.constant, set)
# Add constraint to index.
return MOI.ConstraintIndex{typeof(func), typeof(set)}(eidx+1)
end
@@ -123,7 +123,7 @@ function MOI.add_constraint(model::Optimizer,
rhp_avar_set(avar, vidx)
# TODO(Xhub) change this when quadratic constrains are supported
rhp_equ_add_lin_tree(model.ctx, eidx, coefs, avar, 1.)
_set_rhs(model, eidx, func.constant, set)
_set_cst(model, eidx, func.constant, set)
# Add constraints to index.
ci = MOI.ConstraintIndex{typeof(func), typeof(set)}(eidx+1)
model.quadfn_mapping[ci] = eidx
@@ -149,7 +149,7 @@ function MOI.add_constraint(model::Optimizer,
end

for (idx, cst) in enumerate(func.constants)
_set_rhs(model, current_m-1+idx, cst, set)
_set_cst(model, current_m-1+idx, cst, set)
end

# Add constraints to index.
@@ -240,6 +240,97 @@ function MOI.add_constraint(model::Optimizer, vov::VOV, set::MOI.SecondOrderCone
return ci
end

function MOI.add_constraint(model::Optimizer, vaf::VAF, set::MOI.Complements)

# the vaf needs to be parsed
idxs, vars, coeffs = canonical_vector_affine_reduction(vaf)
# TODO: for 0.22, we must set dim to set.dimension/2
dim = set.dimension

rhp_vars = Vector{RHP_IDXT}(undef, dim)
equs = Vector{Tuple{Vector{RHP_IDXT},Vector{Float64}}}(undef, dim)

for i in 1:dim
equs[i] = (Vector{RHP_IDXT}(undef, 0), Vector{Float64}(undef, 0))
end

for (idx, var, coeff) in zip(idxs, vars, coeffs)
# past dim, we have the complementary variable
if idx >= dim
rhp_vars[idx-dim+1] = var
else
push!(equs[idx+1][1], var)
push!(equs[idx+1][2], coeff)
end
end

# Now we define all the equations
avar = _ensure_avar(model)
current_m = ctx_numequ(model.ctx)
# Add constraints inside ReSHOP.
rhp_add_equs(model.ctx, dim)

# Add all relations
for i in 1:dim
ei = current_m+i-1
vi = rhp_vars[i]
vis_fn, coeffs = equs[i]
rhp_avar_set(avar, vis_fn)
rhp_equ_add_linear(model.ctx, ei, avar, coeffs)
rhp_set_equasmapping(model.ctx, ei)
rhp_set_perp(model.ctx, ei, vi)
reshop_set_cst(model.ctx, ei, vaf.constants[i])
end


# Add constraints to index.
# The index of the t variable is used, since the multiplier
# would only be defined for it.
ci = MOI.ConstraintIndex{typeof(vaf), typeof(set)}(current_m+1)

if dim > 1
model.vaf_mapping[ci] = collect(current_m:current_m+dim-1)
end

return ci
end
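For context, a sketch of the kind of model that reaches the method above: JuMP's complements syntax lowers an affine-expression-versus-variable pairing to a VectorAffineFunction-in-MOI.Complements constraint (that the JuMP version in use exposes this syntax is an assumption):

    using JuMP, ReSHOP

    model = Model(ReSHOP.Optimizer)
    @variable(model, x >= 0)
    # 0 <= x  perp  2x - 1 >= 0; the affine side becomes the VAF rows,
    # the variable side becomes the complementary variable
    @constraint(model, complements(2x - 1, x))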

function MOI.add_constraint(model::Optimizer, vov::VOV, set::MOI.Complements)

# TODO: for 0.22, we must set dim to set.dimension/2
dim = set.dimension

rhp_vars = RHP_IDXT[vi.value-1 for vi in vov.variables]

# Now we define all the equations
current_m = ctx_numequ(model.ctx)

# Add constraints inside ReSHOP
rhp_add_equs(model.ctx, dim)

# Add all relations
for i in 1:dim
ei = current_m+i-1
vi_fn = rhp_vars[i]
vi = rhp_vars[i+dim]
ctx_add_lin_var(model.ctx, ei, vi_fn, 1.)
rhp_set_equasmapping(model.ctx, ei)
rhp_set_perp(model.ctx, ei, vi)
end


# Add constraints to index.
# The index of the t variable is used, since the multiplier
# would only be defined for it.
ci = MOI.ConstraintIndex{VOV, typeof(set)}(current_m+1)

if dim > 1
model.vov_mapping[ci] = collect(current_m:current_m+dim-1)
end

return ci
end
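The VectorOfVariables method above handles the case where both halves of each pair are plain variables: the first dim entries of the vector are read as the functions and the last dim as their complementary variables. A matching hedged sketch of a model that exercises it:

    using JuMP, ReSHOP

    model = Model(ReSHOP.Optimizer)
    @variable(model, x >= 0)
    @variable(model, y >= 0)
    # y perp x: both sides are variables, so JuMP lowers this to
    # VectorOfVariables-in-MOI.Complements
    @constraint(model, complements(y, x))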

function MOI.add_constraint(model::Optimizer, vov::VOV,
set::Union{MOI.SOS1{Float64}, MOI.SOS2{Float64}})
@assert length(vov.variables) == length(set.weights)
@@ -478,7 +569,6 @@ end
## Constraint naming
# TODO
function MOI.set(model::Optimizer, ::MOI.ConstraintName, ci::MOI.ConstraintIndex{<:SF,<:LS}, name::String)
println("saving constraint named $name at index $(ci.value)")
ctx_setequname(model.ctx, ci.value-1, name)
end

8 changes: 4 additions & 4 deletions src/MOI_wrapper/getters.jl
@@ -46,9 +46,9 @@ _chk_citype = Dict(
)

_cone_moi_to_rhp = Dict(
MOI.LessThan{Float64} => CONE_R_MINUS,
MOI.GreaterThan{Float64} => CONE_R_PLUS,
MOI.EqualTo{Float64} => CONE_0
MOI.LessThan{Float64} => RHP_CONE_R_MINUS,
MOI.GreaterThan{Float64} => RHP_CONE_R_PLUS,
MOI.EqualTo{Float64} => RHP_CONE_0
)

function chk_equ_citype(ctx, eidx, ::Type{MOI.ConstraintIndex{MOI.SingleVariable, S}}) where S<:SS
@@ -60,7 +60,7 @@ end

function chk_equ_citype(ctx, eidx, ::Type{MOI.ConstraintIndex{F, S}}) where {F<:SF, S<:LS}
type, cone = reshop_get_equtype(ctx, eidx)
res = (type == 2) && (cone == get(_cone_moi_to_rhp, S, CONE_NONE))
res = (type == 2) && (cone == get(_cone_moi_to_rhp, S, RHP_CONE_NONE))
return res
end

7 changes: 1 addition & 6 deletions src/MOI_wrapper/nlp.jl
@@ -56,10 +56,6 @@ function load_nlp_constraints(model::Optimizer, nlp_data::MOI.NLPBlockData)
lin_part = Dict{Int32, Float64}()
c, constant, conlinearities = process_expression!(c, lin_part)

# Update bounds on constraint
lb -= constant
ub -= constant

eidx = rhp_add_equ(model.ctx)

tree, node = reshop_get_treedata(model.ctx, eidx)
@@ -71,8 +67,7 @@ function load_nlp_constraints(model::Optimizer, nlp_data::MOI.NLPBlockData)
rhp_equ_add_linear_chk(model.ctx, eidx, avar, collect(values(lin_part)))
end
reshop_set_equtype(model.ctx, eidx, relation_to_reshop[rel])
isfinite(lb) && reshop_set_rhs(model.ctx, eidx, lb)
isfinite(ub) && reshop_set_rhs(model.ctx, eidx, ub)
isfinite(constant) && reshop_set_cst(model.ctx, eidx, constant)
end

end
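With this change the NLP loader no longer folds the expression constant into the bounds; the constant now lives on the equation via reshop_set_cst and lb/ub stay untouched. A quick check that the two encodings accept exactly the same points:

    # Old: lb - c <= g(x) <= ub - c   (constant folded into the bounds)
    # New: lb <= g(x) + c <= ub       (constant stored on the equation)
    g(x) = x^2
    c, lb, ub = 2.5, 0.0, 10.0
    for x in (-2.0, 0.0, 1.5, 3.0)
        @assert (lb - c <= g(x) <= ub - c) == (lb <= g(x) + c <= ub)
    end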
33 changes: 11 additions & 22 deletions src/MOI_wrapper/objective.jl
@@ -4,38 +4,27 @@ MOI.get(model::Optimizer, ::MOI.ObjectiveSense) = model.sense

# Objective definition.
function add_objective!(model::Optimizer, objective::MOI.ScalarQuadraticFunction)
eidx = rhp_add_equ(model.ctx)
# We parse the expression passed in arguments.
qvidx1, qvidx2, qcoefs = canonical_quadratic_reduction(objective)
lvidx, lcoefs = canonical_linear_reduction(objective)
rhp_equ_add_quadratic(model.ctx, eidx, qvidx1, qvidx2, qcoefs)
avar = _ensure_avar(model)
rhp_avar_set(avar, lvidx)
rhp_equ_add_linear_chk(model.ctx, eidx, avar, lcoefs)
reshop_set_rhs(model.ctx, eidx, objective.constant)
rhp_set_objeqn(model.ctx, eidx)
return
avar = _ensure_avar(model)
eidx = rhp_addequ_nocst(model.ctx, avar, objective)
rhp_set_objeqn(model.ctx, eidx)
reshop_set_cst(model.ctx, eidx, objective.constant)
return
end

function add_objective!(model::Optimizer, objective::MOI.ScalarAffineFunction)
eidx = rhp_add_equ(model.ctx)
lvidx, lcoefs = canonical_linear_reduction(objective)
avar = _ensure_avar(model)
rhp_avar_set(avar, lvidx)
rhp_equ_add_linear(model.ctx, eidx, avar, lcoefs)
reshop_set_rhs(model.ctx, eidx, objective.constant)
rhp_set_objeqn(model.ctx, eidx)
return
avar = _ensure_avar(model)
eidx = rhp_addequ_nocst(model.ctx, avar, objective)
rhp_set_objeqn(model.ctx, eidx)
reshop_set_cst(model.ctx, eidx, objective.constant)
return
end

function add_objective!(model::Optimizer, var::MOI.SingleVariable)
check_inbounds(model, var)
# TODO(Xhub) because of ovf, we need to always add an equation
# rhp_set_objvar(model.ctx, var.variable.value - 1)
eidx = rhp_add_equ(model.ctx)
avar = _ensure_avar(model)
rhp_avar_set(avar, var.variable.value - 1)
rhp_equ_add_linear(model.ctx, eidx, avar, [1.,])
eidx = rhp_addequ_nocst(model.ctx, avar, var)
rhp_set_objeqn(model.ctx, eidx)
return
end
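All three objective methods now funnel through rhp_addequ_nocst before setting the objective equation and, where present, the constant. The helper is defined elsewhere in the package; a plausible reading of its affine case, with the signature assumed from the call sites above:

    # Hedged sketch, not the actual ReSHOP.jl implementation: allocate the
    # objective equation and add the function terms, leaving the constant
    # to the caller (hence "nocst").
    function rhp_addequ_nocst_sketch(ctx, avar, objective)
        eidx = rhp_add_equ(ctx)                      # new equation index
        lvidx, lcoefs = canonical_linear_reduction(objective)
        rhp_avar_set(avar, lvidx)                    # load variables into avar
        rhp_equ_add_linear(ctx, eidx, avar, lcoefs)  # add linear terms
        return eidx
    end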
2 changes: 1 addition & 1 deletion src/MOI_wrapper/results.jl
@@ -283,7 +283,7 @@ end
function MOI.get(model::Optimizer, ::MOI.ConstraintPrimal,
ci::MOI.ConstraintIndex{S, T}) where {S <: VAF, T <: Union{MOI.Nonnegatives, MOI.Nonpositives}}
@checkcons(model, ci)
return [_get_equval(model.ctx, eidx) - ctx_getrhs(model.ctx, eidx) for eidx in model.vaf_mapping[ci]]
return [_get_equval(model.ctx, eidx) + ctx_getcst(model.ctx, eidx) for eidx in model.vaf_mapping[ci]]
end

function MOI.get(model::Optimizer, ::MOI.ConstraintPrimal,
2 changes: 2 additions & 0 deletions src/MOI_wrapper/supports.jl
@@ -6,6 +6,8 @@ MOI.supports_constraint(::Optimizer, ::Type{<:ALLV}, ::Type{<:NCS}) = true
MOI.supports_constraint(::Optimizer, ::Type{<:SF}, ::Type{<:LS}) = true
MOI.supports_constraint(::Optimizer, ::Type{VAF}, ::Type{<:VLS}) = true
MOI.supports_constraint(::Optimizer, ::Type{VOV}, ::Type{<:VLS}) = true
MOI.supports_constraint(::Optimizer, ::Type{VOV}, ::Type{MOI.Complements}) = true
MOI.supports_constraint(::Optimizer, ::Type{VAF}, ::Type{MOI.Complements}) = true

# TODO:
MOI.supports(::Optimizer, ::MOI.ConstraintName, ::Type{MOI.ConstraintIndex}) = false
(diff for the remaining changed files not shown)

2 comments on commit 73864ba

@xhub (Owner, Author) commented on 73864ba, Mar 14, 2022

@JuliaRegistrator commented:

Registration pull request created: JuliaRegistries/General/56611

After the above pull request is merged, it is recommended that a tag be created on this repository for the registered package version.

This will be done automatically if the Julia TagBot GitHub Action is installed, or it can be done manually through the GitHub interface or via:

git tag -a v0.1.3 -m "<description of version>" 73864ba41524881532e90895e1649aec3d782d8e
git push origin v0.1.3
