From 3f57ba4bb3350701bf524750c936a4efb49d9622 Mon Sep 17 00:00:00 2001 From: "Documenter.jl" Date: Tue, 25 Jun 2024 00:24:39 +0000 Subject: [PATCH] build based on 258921a --- previews/PR151/Examples/benders/index.html | 2 +- previews/PR151/Examples/example/index.html | 37 +++----------------- previews/PR151/Examples/markowitz/index.html | 2 +- previews/PR151/index.html | 2 +- previews/PR151/manual/index.html | 2 +- previews/PR151/reference/index.html | 6 ++-- previews/PR151/search/index.html | 2 +- previews/PR151/search_index.js | 2 +- 8 files changed, 13 insertions(+), 42 deletions(-) diff --git a/previews/PR151/Examples/benders/index.html b/previews/PR151/Examples/benders/index.html index 06d337c..45bdbb0 100644 --- a/previews/PR151/Examples/benders/index.html +++ b/previews/PR151/Examples/benders/index.html @@ -203,4 +203,4 @@ return best_sol[1] end

Run benders decomposition with POI

β2 = decomposed_model(; print_timer_outputs = false);
 GC.gc()
-β2 = decomposed_model();
+β2 = decomposed_model(); diff --git a/previews/PR151/Examples/example/index.html b/previews/PR151/Examples/example/index.html index 2ba6a1d..3735119 100644 --- a/previews/PR151/Examples/example/index.html +++ b/previews/PR151/Examples/example/index.html @@ -1,5 +1,5 @@ -Basic Examples · ParametricOptInterface.jl

Basic Examples

MOI example - step by step usage

Let's write a step-by-step example of POI usage at the MOI level.

First, we declare a ParametricOptInterface.Optimizer on top of a MOI optimizer. In the example, we consider HiGHS as the underlying solver:

using HiGHS
+Basic Examples · ParametricOptInterface.jl

Basic Examples

MOI example - step by step usage

Let's write a step-by-step example of POI usage at the MOI level.

First, we declare a ParametricOptInterface.Optimizer on top of a MOI optimizer. In the example, we consider HiGHS as the underlying solver:

using HiGHS
 using MathOptInterface
 using ParametricOptInterface
 
@@ -129,7 +129,7 @@
     optimizer,
     MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(),
     obj_func,
-)

To multiply a parameter in a quadratic term, the user will need to use the POI.QuadraticObjectiveCoef model attribute.

p = first(MOI.add_constrained_variable.(optimizer, MOI.Parameter(1.0)))
+)

To multiply a parameter in a quadratic term, the user will need to use the POI.QuadraticObjectiveCoef model attribute.

p = first(MOI.add_constrained_variable.(optimizer, MOI.Parameter(1.0)))
 MOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), p)

This function will add the term p*xy to the objective function. It's also possible to multiply a scalar affine function to the quadratic term.

MOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), 2p+3)

This will set the term (2p+3)*xy to the objective function (it overwrites the last set). Then, just optimize the model.

MOI.optimize!(model)
 isapprox(MOI.get(model, MOI.ObjectiveValue()), 32/3, atol=1e-4)
 isapprox(MOI.get(model, MOI.VariablePrimal(), x), 4/3, atol=1e-4)
@@ -150,40 +150,11 @@
 @variable(model, p in MOI.Parameter(1.0))
 @constraint(model, 2x + y <= 4)
 @constraint(model, x + 2y <= 4)
-@objective(model, Max, (x^2 + y^2)/2)

We use the same MOI function to add the parameter multiplied to the quadratic term.

MOI.set(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)), 2index(p)+3)

If the user print the model, the term (2p+3)*xy won't show. It's possible to retrieve the parametric function multiplying the term xy with MOI.get.

MOI.get(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)))

Then, just optimize the model

optimize!(model)
+@objective(model, Max, (x^2 + y^2)/2)

We use the same MOI function to add the parameter multiplied to the quadratic term.

MOI.set(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)), 2index(p)+3)

If the user print the model, the term (2p+3)*xy won't show. It's possible to retrieve the parametric function multiplying the term xy with MOI.get.

MOI.get(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)))

Then, just optimize the model

optimize!(model)
 isapprox(objective_value(model), 32/3, atol=1e-4)
 isapprox(value(x), 4/3, atol=1e-4)
 isapprox(value(y), 4/3, atol=1e-4)

To change the parameter just set POI.ParameterValue and optimize again.

MOI.set(model, POI.ParameterValue(), p, 2.0)
 optimize!(model)
 isapprox(objective_value(model), 128/9, atol=1e-4)
 isapprox(value(x), 4/3, atol=1e-4)
-isapprox(value(y), 4/3, atol=1e-4)

JuMP Example - Non Linear Programming (NLP)

POI currently works with NLPs when users wish to add the parameters to the non-NL constraints or objective. This means that POI works with models like this one:

@variable(model, x)
-@variable(model, y)
-@variable(model, z in MOI.Parameter(10))
-@constraint(model, x + y >= z)
-@NLobjective(model, Min, x^2 + y^2)

but does not work with models that have parameters on the NL expressions like this one:

@variable(model, x)
-@variable(model, y)
-@variable(model, z in MOI.Parameter(10))
-@constraint(model, x + y >= z)
-@NLobjective(model, Min, x^2 + y^2 + z) # There is a parameter here

If users wish to add parameters in NL expressions we strongly recommend them to read this section on the JuMP documentation

Although POI works with NLPs, there is some important information for users to keep in mind. It all comes from the fact that POI relies on the MOI interface for problem modifications and these are not common on NLP solvers; most solvers only allow users to modify variable bounds using their official APIs. This means that if users wish to make modifications on some constraint that is not a variable bound we are not allowed to call MOI.modify because the function is not supported in the MOI solver interface. The work-around to this is defining a POI.Optimizer on a caching optimizer:

ipopt = Ipopt.Optimizer()
-MOI.set(ipopt, MOI.RawOptimizerAttribute("print_level"), 0)
-cached =
-    () -> MOI.Bridges.full_bridge_optimizer(
-        MOIU.CachingOptimizer(
-            MOIU.UniversalFallback(MOIU.Model{Float64}()),
-            ipopt,
-        ),
-        Float64,
-    )
-POI_cached_optimizer() = POI.Optimizer(cached())
-model = Model(() -> POI_cached_optimizer())
-@variable(model, x)
-@variable(model, y)
-@variable(model, z in MOI.Parameter(10))
-@constraint(model, x + y >= z)
-@NLobjective(model, Min, x^2 + y^2)

This works but keep in mind that the model has an additional layer between the solver and the POI.Optimizer. This will make most operations slower than with the version without the caching optimizer. Keep in mind that since the official APIs of most solvers don't allow for modifications on linear constraints there should be no big difference between making a modification using POI or re-building the model from scratch.

If users wish to make modifications on variable bounds the POI interface will help you save time between solves. In this case you should use the ParametricOptInterface.ConstraintsInterpretation as we do in this example:

model = Model(() -> POI.Optimizer(Ipopt.Optimizer()))
-@variable(model, x)
-@variable(model, z in MOI.Parameter(10))
-MOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_BOUNDS)
-@constraint(model, x >= z)
-@NLobjective(model, Min, x^2)

This use case should help users diminish the time spent making model modifications and re-solving the model. To increase the performance, users that are familiar with JuMP direct mode can also use it.

+isapprox(value(y), 4/3, atol=1e-4)
diff --git a/previews/PR151/Examples/markowitz/index.html b/previews/PR151/Examples/markowitz/index.html index 9a7ce33..dd46dbc 100644 --- a/previews/PR151/Examples/markowitz/index.html +++ b/previews/PR151/Examples/markowitz/index.html @@ -62,4 +62,4 @@ end

Plot the efficient frontier

portfolios_values = sort(portfolios_values,by=x->x[1])
 portfolios_values_matrix = hcat([[v[1],v[2]] for v in values(portfolios_values)]...)'
 plot(portfolios_values_matrix[:,2],portfolios_values_matrix[:,1],legend=false,
-xlabel="Standard Deviation", ylabel = "Return", title = "Efficient Frontier")
+xlabel="Standard Deviation", ylabel = "Return", title = "Efficient Frontier") diff --git a/previews/PR151/index.html b/previews/PR151/index.html index 2b6b486..0172cb9 100644 --- a/previews/PR151/index.html +++ b/previews/PR151/index.html @@ -1,2 +1,2 @@ -Home · ParametricOptInterface.jl
+Home · ParametricOptInterface.jl
diff --git a/previews/PR151/manual/index.html b/previews/PR151/manual/index.html index 6260b16..f1c886d 100644 --- a/previews/PR151/manual/index.html +++ b/previews/PR151/manual/index.html @@ -4,4 +4,4 @@ const POI = ParametricOptInterface const MOI = MathOptInterface # Define a Optimizer on top of the MOI optimizer -optimizer = POI.Optimizer(HiGHS.Optimizer())

Parameters

A MOI.Parameter is a set used to define a variable with a fixed value that can be changed by the user. It is analogous to MOI.EqualTo, but can be used by special methods like the ones in this package to remove the fixed variable from the optimization problem. This permits the usage of multiplicative parameters in linear models and might speed up solves since the number of variables is reduced.

Adding a new parameter to a model

To add a parameter to a model, we must use the MOI.add_constrained_variable() function, passing as its arguments the model and a MOI.Parameter with its given value:

y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))

Changing the parameter value

To change a given parameter's value, access its VariableIndex and set it to the new value using the MOI.Parameter structure.

MOI.set(optimizer, POI.ParameterValue(), y, MOI.Parameter(2.0))

Retrieving the dual of a parameter

Given an optimized model, one can compute the dual associated to a parameter, as long as it is an additive term in the constraints or objective. One can do so by getting the MOI.ConstraintDual attribute of the parameter's MOI.ConstraintIndex:

MOI.get(optimizer, POI.ParameterDual(), y)
+optimizer = POI.Optimizer(HiGHS.Optimizer())

Parameters

A MOI.Parameter is a set used to define a variable with a fixed value that can be changed by the user. It is analogous to MOI.EqualTo, but can be used by special methods like the ones in this package to remove the fixed variable from the optimization problem. This permits the usage of multiplicative parameters in linear models and might speed up solves since the number of variables is reduced.

Adding a new parameter to a model

To add a parameter to a model, we must use the MOI.add_constrained_variable() function, passing as its arguments the model and a MOI.Parameter with its given value:

y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))

Changing the parameter value

To change a given parameter's value, access its VariableIndex and set it to the new value using the MOI.Parameter structure.

MOI.set(optimizer, POI.ParameterValue(), y, MOI.Parameter(2.0))

Retrieving the dual of a parameter

Given an optimized model, one can compute the dual associated to a parameter, as long as it is an additive term in the constraints or objective. One can do so by getting the MOI.ConstraintDual attribute of the parameter's MOI.ConstraintIndex:

MOI.get(optimizer, POI.ParameterDual(), y)
diff --git a/previews/PR151/reference/index.html b/previews/PR151/reference/index.html index 89c9db4..7b411e2 100644 --- a/previews/PR151/reference/index.html +++ b/previews/PR151/reference/index.html @@ -1,6 +1,6 @@ Reference · ParametricOptInterface.jl

Reference

ParametricOptInterface.ConstraintsInterpretationType
ConstraintsInterpretation <: MOI.AbstractOptimizerAttribute

Attribute to define how POI.Optimizer should interpret constraints.

  • POI.ONLY_CONSTRAINTS: Only interpret ScalarAffineFunction constraints as linear constraints. If an expression such as x >= p1 + p2 appears it will be treated like a new constraint. This is the default behaviour of POI.Optimizer

  • POI.ONLY_BOUNDS: Only interpret ScalarAffineFunction constraints as a variable bound. This is valid for constraints such as x >= p or x >= p1 + p2. If a constraint x1 + x2 >= p appears, which is not a valid variable bound it will throw an error.

  • POI.BOUNDS_AND_CONSTRAINTS: Interpret ScalarAffineFunction constraints as a variable bound if they are a valid variable bound, i.e., x >= p or x >= p1 + p2 and interpret them as linear constraints otherwise.

Example

MOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_BOUNDS)
 MOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_CONSTRAINTS)
-MOI.set(model, POI.InterpretConstraintsAsBounds(), POI.BOUNDS_AND_CONSTRAINTS)
source
ParametricOptInterface.OptimizerType
Optimizer{T, OT <: MOI.ModelLike} <: MOI.AbstractOptimizer

Declares a Optimizer, which allows the handling of parameters in a optimization model.

Keyword arguments

  • evaluate_duals::Bool: If true, evaluates the dual of parameters. Users might want to set it to false to increase performance when the duals of parameters are not necessary. Defaults to true.

  • save_original_objective_and_constraints: If true saves the original function and set of the constraints as well as the original objective function inside POI.Optimizer. This is useful for printing the model but greatly increases the memory footprint. Users might want to set it to false to increase performance in applications where you don't need to query the original expressions provided to the model in constraints or in the objective. Note that this might break printing or queries such as MOI.get(model, MOI.ConstraintFunction(), c). Defaults to true.

Example

julia> ParametricOptInterface.Optimizer(GLPK.Optimizer())
-ParametricOptInterface.Optimizer{Float64,GLPK.Optimizer}
source
ParametricOptInterface.ParameterValueType
ParameterValue <: MOI.AbstractVariableAttribute

Attribute defined to set and get parameter values

Example

MOI.set(model, POI.ParameterValue(), p, 2.0)
-MOI.get(model, POI.ParameterValue(), p)
source
+MOI.set(model, POI.InterpretConstraintsAsBounds(), POI.BOUNDS_AND_CONSTRAINTS)source
ParametricOptInterface.OptimizerType
Optimizer{T, OT <: MOI.ModelLike} <: MOI.AbstractOptimizer

Declares a Optimizer, which allows the handling of parameters in a optimization model.

Keyword arguments

  • evaluate_duals::Bool: If true, evaluates the dual of parameters. Users might want to set it to false to increase performance when the duals of parameters are not necessary. Defaults to true.

  • save_original_objective_and_constraints: If true saves the original function and set of the constraints as well as the original objective function inside POI.Optimizer. This is useful for printing the model but greatly increases the memory footprint. Users might want to set it to false to increase performance in applications where you don't need to query the original expressions provided to the model in constraints or in the objective. Note that this might break printing or queries such as MOI.get(model, MOI.ConstraintFunction(), c). Defaults to true.

Example

julia> ParametricOptInterface.Optimizer(GLPK.Optimizer())
+ParametricOptInterface.Optimizer{Float64,GLPK.Optimizer}
source
ParametricOptInterface.ParameterDualType
ParameterDual <: MOI.AbstractVariableAttribute

Attribute defined to get the dual values associated to parameters

Example

MOI.get(model, POI.ParameterValue(), p)
source
ParametricOptInterface.ParameterValueType
ParameterValue <: MOI.AbstractVariableAttribute

Attribute defined to set and get parameter values

Example

MOI.set(model, POI.ParameterValue(), p, 2.0)
+MOI.get(model, POI.ParameterValue(), p)
source
diff --git a/previews/PR151/search/index.html b/previews/PR151/search/index.html index 6d103c4..e2abbf8 100644 --- a/previews/PR151/search/index.html +++ b/previews/PR151/search/index.html @@ -1,2 +1,2 @@ -Search · ParametricOptInterface.jl

Loading search...

    +Search · ParametricOptInterface.jl

    Loading search...

      diff --git a/previews/PR151/search_index.js b/previews/PR151/search_index.js index 8923fbc..3481a06 100644 --- a/previews/PR151/search_index.js +++ b/previews/PR151/search_index.js @@ -1,3 +1,3 @@ var documenterSearchIndex = {"docs": -[{"location":"manual/#Manual","page":"Manual","title":"Manual","text":"","category":"section"},{"location":"manual/#Why-use-parameters?","page":"Manual","title":"Why use parameters?","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"A typical optimization model built using MathOptInterface.jl (MOIfor short) has two main components:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"Variables\nConstants","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"Using these basic elements, one can create functions and sets that, together, form the desired optimization model. The goal of POI is the implementation of a third type, parameters, which","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"are declared similar to a variable, and inherits some functionalities (e.g. dual calculation)\nacts like a constant, in the sense that it has a fixed value that will remain the same unless explicitely changed by the user","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"A main concern is to efficiently implement this new type, as one typical usage is to change its value to analyze the model behavior, without the need to build a new one from scratch.","category":"page"},{"location":"manual/#How-it-works","page":"Manual","title":"How it works","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"The main idea applied in POI is that the interaction between the solver, e.g. GLPK, and the optimization model will be handled by MOI as usual. 
Because of that, POI is a higher level wrapper around MOI, responsible for receiving variables, constants and parameters, and forwarding to the lower level model only variables and constants.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"As POI receives parameters, it must analyze and decide how they should be handled on the lower level optimization model (the MOI model).","category":"page"},{"location":"manual/#Usage","page":"Manual","title":"Usage","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"In this manual we describe how to interact with the optimization model at the MOI level. In the Examples section you can find some tutorials with the JuMP usage.","category":"page"},{"location":"manual/#Supported-constraints","page":"Manual","title":"Supported constraints","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"This is a list of supported MOI constraint functions that can handle parameters. 
If you try to add a parameter to a function that is not listed here, it will return an unsupported error.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI Function\nScalarAffineFunction\nScalarQuadraticFunction\nVectorAffineFunction","category":"page"},{"location":"manual/#Supported-objective-functions","page":"Manual","title":"Supported objective functions","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI Function\nScalarAffineFunction\nScalarQuadraticFunction","category":"page"},{"location":"manual/#Declare-a-Optimizer","page":"Manual","title":"Declare a Optimizer","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"In order to use parameters, the user needs to declare a ParametricOptInterface.Optimizer on top of a MOI optimizer, such as HiGHS.Optimizer().","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"using ParametricOptInterface, MathOptInterface, HiGHS\n# Rename ParametricOptInterface and MathOptInterface to simplify the code\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\n# Define a Optimizer on top of the MOI optimizer\noptimizer = POI.Optimizer(HiGHS.Optimizer())","category":"page"},{"location":"manual/#Parameters","page":"Manual","title":"Parameters","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"A MOI.Parameter is a set used to define a variable with a fixed value that can be changed by the user. It is analogous to MOI.EqualTo, but can be used by special methods like the ones in this package to remove the fixed variable from the optimization problem. 
This permits the usage of multiplicative parameters in lienar models and might speedup solves since the number of variables is reduced.","category":"page"},{"location":"manual/#Adding-a-new-parameter-to-a-model","page":"Manual","title":"Adding a new parameter to a model","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"To add a parameter to a model, we must use the MOI.add_constrained_variable() function, passing as its arguments the model and a MOI.Parameter with its given value:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))","category":"page"},{"location":"manual/#Changing-the-parameter-value","page":"Manual","title":"Changing the parameter value","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"To change a given parameter's value, access its VariableIndex and set it to the new value using the MOI.Parameter structure.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI.set(optimizer, POI.ParameterValue(), y, MOI.Parameter(2.0))","category":"page"},{"location":"manual/#Retrieving-the-dual-of-a-parameter","page":"Manual","title":"Retrieving the dual of a parameter","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"Given an optimized model, one can compute the dual associated to a parameter, as long as it is an additive term in the constraints or objective. 
One can do so by getting the MOI.ConstraintDual attribute of the parameter's MOI.ConstraintIndex:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI.get(optimizer, POI.ParameterDual(), y)","category":"page"},{"location":"Examples/example/#Basic-Examples","page":"Basic Examples","title":"Basic Examples","text":"","category":"section"},{"location":"Examples/example/#MOI-example-step-by-step-usage","page":"Basic Examples","title":"MOI example - step by step usage","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's write a step-by-step example of POI usage at the MOI level.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"First, we declare a ParametricOptInterface.Optimizer on top of a MOI optimizer. In the example, we consider HiGHS as the underlying solver:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing MathOptInterface\nusing ParametricOptInterface\n\nconst MOI = MathOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(HiGHS.Optimizer())","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We declare the variable x as in a typical MOI model, and we add a non-negativity constraint:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"x = MOI.add_variables(optimizer, 2)\nfor x_i in x\n MOI.add_constraint(optimizer, x_i, MOI.GreaterThan(0.0))\nend","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now, let's consider 3 MOI.Parameter. Two of them, y, z, will be placed in the constraints and one, w, in the objective function. 
We'll start all three of them with a value equal to 0:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"w, cw = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))\ny, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))\nz, cz = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's add the constraints. Notice that we treat parameters and variables in the same way when building the functions that will be placed in some set to create a constraint (Function-in-Set):","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"cons1 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([2.0, 1.0, 3.0], [x[1], x[2], y]), 0.0)\nci1 = MOI.add_constraint(optimizer, cons1, MOI.LessThan(4.0))\ncons2 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, 2.0, 0.5], [x[1], x[2], z]), 0.0)\nci2 = MOI.add_constraint(optimizer, cons2, MOI.LessThan(4.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Finally, we declare and add the objective function, with its respective sense:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"obj_func = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([4.0, 3.0, 2.0], [x[1], x[2], w]), 0.0)\nMOI.set(optimizer, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), obj_func)\nMOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now we can optimize the model and assess its termination and primal status:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(optimizer)\nMOI.get(optimizer, 
MOI.TerminationStatus())\nMOI.get(optimizer, MOI.PrimalStatus())","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Given the optimized solution, we check that its value is, as expected, equal to 28/3, and the solution vector x is [4/3, 4/3]:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 28/3, atol = 1e-4)\nisapprox(MOI.get(optimizer, MOI.VariablePrimal(), x[1]), 4/3, atol = 1e-4)\nisapprox(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 4/3, atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can also retrieve the dual values associated to each parameter, as they are all additive:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(optimizer, MOI.ConstraintDual(), cy)\nMOI.get(optimizer, MOI.ConstraintDual(), cz)\nMOI.get(optimizer, MOI.ConstraintDual(), cw)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Notice the direct relationship in this case between the parameters' duals and the associated constraints' duals. The y parameter, for example, only appears in the cons1. If we compare their duals, we can check that the dual of y is equal to its coefficient in cons1 multiplied by the constraint's dual itself, as expected:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(MOI.get(optimizer, MOI.ConstraintDual(), cy), 3*MOI.get(optimizer, MOI.ConstraintDual(), ci1), atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The same is valid for the remaining parameters. 
In case a parameter appears in more than one constraint, or both some constraints and in the objective function, its dual will be equal to the linear combination of the functions' duals multiplied by the respective coefficients.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"So far, we only added some parameters that had no influence at first in solving the model. Let's change the values associated to each parameter to assess its implications. First, we set the value of parameters y and z to 1.0. Notice that we are changing the feasible set of the decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.ParameterValue(), y, 1.0)\nMOI.set(optimizer, POI.ParameterValue(), z, 1.0)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"However, if we check the optimized model now, there will be no changes in the objective function value or the in the optimized decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 28/3, atol = 1e-4)\nisapprox.(MOI.get(optimizer, MOI.VariablePrimal(), x[1]), 4/3, atol = 1e-4)\nisapprox.(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 4/3, atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Although we changed the parameter values, we didn't optimize the model yet. 
Thus, to apply the parameters' changes, the model must be optimized again:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(optimizer)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The MOI.optimize!() function handles the necessary updates, properly fowarding the new outer model (POI model) additions to the inner model (MOI model) which will be handled by the solver. Now we can assess the updated optimized information:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 3.0, atol = 1e-4)\nMOI.get.(optimizer, MOI.VariablePrimal(), x) == [0.0, 1.0]","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If we update the parameter w, associated to the objective function, we are simply adding a constant to it. Notice how the new objective function is precisely equal to the previous one plus the new value of w. 
In addition, as we didn't update the feasible set, the optimized decision variables remain the same.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.ParameterValue(), w, 2.0)\n# Once again, the model must be optimized to incorporate the changes\nMOI.optimize!(optimizer)\n# Only the objective function value changes\nisapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 7.0, atol = 1e-4)\nMOI.get.(optimizer, MOI.VariablePrimal(), x) == [0.0, 1.0]","category":"page"},{"location":"Examples/example/#JuMP-Example-step-by-step-usage","page":"Basic Examples","title":"JuMP Example - step by step usage","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's write a step-by-step example of POI usage at the JuMP level.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"First, we declare a Model on top of a Optimizer of an underlying solver. In the example, we consider HiGHS as the underlying solver:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\n\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = Model(() -> ParametricOptInterface.Optimizer(HiGHS.Optimizer()))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We declare the variable x as in a typical JuMP model:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, x[i = 1:2] >= 0)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now, let's consider 3 MOI.Parameter. Two of them, y, z, will be placed in the constraints and one, w, in the objective function. 
We'll start all three of them with a value equal to 0:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, y in MOI.Parameter(0.0))\n@variable(model, z in MOI.Parameter(0.0))\n@variable(model, w in MOI.Parameter(0.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's add the constraints. Notice that we treat parameters the same way we treat variables when writing the model:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@constraint(model, c1, 2x[1] + x[2] + 3y <= 4)\n@constraint(model, c2, x[1] + 2x[2] + 0.5z <= 4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Finally, we declare and add the objective function, with its respective sense:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@objective(model, Max, 4x[1] + 3x[2] + 2w)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can optimize the model and assess its termination and primal status:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)\ntermination_status(model)\nprimal_status(model)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Given the optimized solution, we check that its value is, as expected, equal to 28/3, and the solution vector x is [4/3, 4/3]:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(objective_value(model), 28/3)\nisapprox(value.(x), [4/3, 4/3])","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can also retrieve the dual values associated to each 
parameter, as they are all additive:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(model, POI.ParameterDual(), y)\nMOI.get(model, POI.ParameterDual(), z)\nMOI.get(model, POI.ParameterDual(), w)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Notice the direct relationship in this case between the parameters' duals and the associated constraints' duals. The y parameter, for example, only appears in the c1. If we compare their duals, we can check that the dual of y is equal to its coefficient in c1 multiplied by the constraint's dual itself, as expected:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"dual_of_y = MOI.get(model, POI.ParameterDual(), y)\nisapprox(dual_of_y, 3 * dual(c1))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The same is valid for the remaining parameters. In case a parameter appears in more than one constraint, or both some constraints and in the objective function, its dual will be equal to the linear combination of the functions' duals multiplied by the respective coefficients.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"So far, we only added some parameters that had no influence at first in solving the model. Let's change the values associated to each parameter to assess its implications. First, we set the value of parameters y and z to 1.0. 
Notice that we are changing the feasible set of the decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), y, 1)\nMOI.set(model, POI.ParameterValue(), z, 1)\n# We can also query the value in the parameters\nMOI.get(model, POI.ParameterValue(), y)\nMOI.get(model, POI.ParameterValue(), z)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To apply the parameters' changes, the model must be optimized again:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The optimize! function handles the necessary updates, properly fowarding the new outer model (POI model) additions to the inner model (MOI model) which will be handled by the solver. Now we can assess the updated optimized information:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(objective_value(model), 3)\nisapprox(value.(x), [0, 1])","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If we update the parameter w, associated to the objective function, we are simply adding a constant to it. Notice how the new objective function is precisely equal to the previous one plus the new value of w. 
In addition, as we didn't update the feasible set, the optimized decision variables remain the same.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), w, 2)\n# Once again, the model must be optimized to incorporate the changes\noptimize!(model)\n# Only the objective function value changes\nisapprox(objective_value(model), 7)\nisapprox(value.(x), [0, 1])","category":"page"},{"location":"Examples/example/#JuMP-Example-Declaring-vectors-of-parameters","page":"Basic Examples","title":"JuMP Example - Declaring vectors of parameters","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Many times it is useful to declare a vector of parameters just like we declare a vector of variables, the JuMP syntax for variables works with parameters too:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = Model(() -> ParametricOptInterface.Optimizer(HiGHS.Optimizer()))\n@variable(model, x[i = 1:3] >= 0)\n@variable(model, p1[i = 1:3] in MOI.Parameter(0.0))\n@variable(model, p2[i = 1:3] in MOI.Parameter.([1, 10, 45]))\n@variable(model, p3[i = 1:3] in MOI.Parameter.(ones(3)))","category":"page"},{"location":"Examples/example/#JuMP-Example-Dealing-with-parametric-expressions-as-variable-bounds","page":"Basic Examples","title":"JuMP Example - Dealing with parametric expressions as variable bounds","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"A very common pattern that appears when using ParametricOptInterface is to add variable and later add some expression with parameters that represent the variable bound. 
The following code illustrates the pattern:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = direct_model(POI.Optimizer(HiGHS.Optimizer()))\n@variable(model, x)\n@variable(model, p in MOI.Parameter(0.0))\n@constraint(model, x >= p)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Since parameters are treated like variables JuMP lowers this to MOI as x - p >= 0 which is not a variable bound but a linear constraint.This means that the current representation of this problem at the solver level is:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"beginalign\n min_x 0\n \n textst x in mathbbR \n x - p geq 0\nendalign","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This behaviour might be undesirable because it creates extra rows in your problem. Users can set the ParametricOptInterface.ConstraintsInterpretation to control how the linear constraints should be interpreted. 
The pattern advised for users seeking the most performance out of ParametricOptInterface should use the followig pattern:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = direct_model(POI.Optimizer(HiGHS.Optimizer()))\n@variable(model, x)\n@variable(model, p in MOI.Parameter(0.0))\n\n# Indicate that all the new constraints will be valid variable bounds\nMOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_BOUNDS)\n@constraint(model, x >= p)\n# The name of this constraint was different to inform users that this is a\n# variable bound.\n\n# Indicate that all the new constraints will not be variable bounds\nMOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_CONSTRAINTS)\n# @constraint(model, ...)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This way the mathematical representation of the problem will be:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"beginalign\n min_x 0\n \n textst x geq p\nendalign","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"which might lead to faster solves.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Users that just want everything to work can use the default value POI.ONLY_CONSTRAINTS or try to use POI.BOUNDS_AND_CONSTRAINTS and leave it to ParametricOptInterface to interpret the constraints as bounds when applicable and linear constraints otherwise.","category":"page"},{"location":"Examples/example/#MOI-Example-Parameters-multiplying-Quadratic-terms","page":"Basic Examples","title":"MOI Example - Parameters multiplying Quadratic terms","text":"","category":"section"},{"location":"Examples/example/","page":"Basic 
Examples","title":"Basic Examples","text":"Let's start with a simple quadratic problem","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using Ipopt\nusing MathOptInterface\nusing ParametricOptInterface\n\nconst MOI = MathOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(Ipopt.Optimizer())\n\nx = MOI.add_variable(optimizer)\ny = MOI.add_variable(optimizer)\nMOI.add_constraint(optimizer, x, MOI.GreaterThan(0.0))\nMOI.add_constraint(optimizer, y, MOI.GreaterThan(0.0))\n\ncons1 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([2.0, 1.0], [x, y]), 0.0)\nci1 = MOI.add_constraint(optimizer, cons1, MOI.LessThan(4.0))\ncons2 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, 2.0], [x, y]), 0.0)\nci2 = MOI.add_constraint(optimizer, cons2, MOI.LessThan(4.0))\n\nMOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE)\nobj_func = MOI.ScalarQuadraticFunction(\n [MOI.ScalarQuadraticTerm(1.0, x, x)\n MOI.ScalarQuadraticTerm(1.0, y, y)],\n MOI.ScalarAffineTerm{Float64}[],\n 0.0,\n)\nMOI.set(\n optimizer,\n MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(),\n obj_func,\n)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To multiply a parameter in a quadratic term, the user will need to use the POI.QuadraticObjectiveCoef model attribute.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"p = first(MOI.add_constrained_variable.(optimizer, MOI.Parameter(1.0)))\nMOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), p)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This function will add the term p*xy to the objective function. 
It's also possible to multiply a scalar affine function to the quadratic term.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), 2p+3)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This will set the term (2p+3)*xy to the objective function (it overwrites the last set). Then, just optimize the model.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(model)\nisapprox(MOI.get(model, MOI.ObjectiveValue()), 32/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), x), 4/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To change the parameter just set POI.ParameterValue and optimize again.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), p, 2.0)\nMOI.optimize!(model)\nisapprox(MOI.get(model, MOI.ObjectiveValue()), 128/9, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), x), 4/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/#JuMP-Example-Parameters-multiplying-Quadratic-terms","page":"Basic Examples","title":"JuMP Example - Parameters multiplying Quadratic terms","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's get the same MOI example","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using Ipopt\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(Ipopt.Optimizer())\nmodel = 
direct_model(optimizer)\n\n@variable(model, x >= 0)\n@variable(model, y >= 0)\n@variable(model, p in MOI.Parameter(1.0))\n@constraint(model, 2x + y <= 4)\n@constraint(model, x + 2y <= 4)\n@objective(model, Max, (x^2 + y^2)/2)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We use the same MOI function to add the parameter multiplied to the quadratic term.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)), 2index(p)+3)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If the user print the model, the term (2p+3)*xy won't show. It's possible to retrieve the parametric function multiplying the term xy with MOI.get.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Then, just optimize the model","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)\nisapprox(objective_value(model), 32/3, atol=1e-4)\nisapprox(value(x), 4/3, atol=1e-4)\nisapprox(value(y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To change the parameter just set POI.ParameterValue and optimize again.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), p, 2.0)\noptimize!(model)\nisapprox(objective_value(model), 128/9, atol=1e-4)\nisapprox(value(x), 4/3, atol=1e-4)\nisapprox(value(y), 4/3, 
atol=1e-4)","category":"page"},{"location":"Examples/example/#JuMP-Example-Non-Linear-Programming-(NLP)","page":"Basic Examples","title":"JuMP Example - Non Linear Programming (NLP)","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"POI currently works with NLPs when users wish to add the parameters to the non-NL constraints or objective. This means that POI works with models like this one:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, x)\n@variable(model, y)\n@variable(model, z in MOI.Parameter(10))\n@constraint(model, x + y >= z)\n@NLobjective(model, Min, x^2 + y^2)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"but does not work with models that have parameters on the NL expressions like this one:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, x)\n@variable(model, y)\n@variable(model, z in MOI.Parameter(10))\n@constraint(model, x + y >= z)\n@NLobjective(model, Min, x^2 + y^2 + z) # There is a parameter here","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If users with to add parameters in NL expressions we strongly recommend them to read this section on the JuMP documentation","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Although POI works with NLPs there are some important information for users to keep in mind. All come from the fact that POI relies on the MOI interface for problem modifications and these are not common on NLP solvers, most solvers only allow users to modify variable bounds using their official APIs. 
This means that if users wish to make modifications on some constraint that is not a variable bound we are not allowed to call MOI.modify because the function is not supported in the MOI solver interface. The work-around to this is defining a POI.Optimizer on a caching optimizer:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"ipopt = Ipopt.Optimizer()\nMOI.set(ipopt, MOI.RawOptimizerAttribute(\"print_level\"), 0)\ncached =\n () -> MOI.Bridges.full_bridge_optimizer(\n MOIU.CachingOptimizer(\n MOIU.UniversalFallback(MOIU.Model{Float64}()),\n ipopt,\n ),\n Float64,\n )\nPOI_cached_optimizer() = POI.Optimizer(cached())\nmodel = Model(() -> POI_cached_optimizer())\n@variable(model, x)\n@variable(model, y)\n@variable(model, z in MOI.Parameter(10))\n@constraint(model, x + y >= z)\n@NLobjective(model, Min, x^2 + y^2)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This works but keep in mind that the model has an additional layer of between the solver and the POI.Optimizer. This will make most operations slower than with the version without the caching optimizer. Keep in mind that since the official APIs of most solvers don't allow for modifications on linear constraints there should have no big difference between making a modification using POI or re-building the model from scratch.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If users wish to make modifications on variable bounds the POI interface will help you save time between solves. 
In this case you should use the ParametricOptInterface.ConstraintsInterpretation as we do in this example:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"model = Model(() -> POI.Optimizer(Ipopt.Optimizer()))\n@variable(model, x)\n@variable(model, z in MOI.Parameter(10))\nMOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_BOUNDS)\n@constraint(model, x >= z)\n@NLobjective(model, Min, x^2)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This use case should help users diminsh the time of making model modifications and re-solve the model. To increase the performance users that are familiar with JuMP direct mode can also use it.","category":"page"},{"location":"reference/#Reference","page":"Reference","title":"Reference","text":"","category":"section"},{"location":"reference/","page":"Reference","title":"Reference","text":"ParametricOptInterface.ConstraintsInterpretation\nParametricOptInterface.Optimizer\nParametricOptInterface.ParameterDual\nParametricOptInterface.ParameterValue","category":"page"},{"location":"reference/#ParametricOptInterface.ConstraintsInterpretation","page":"Reference","title":"ParametricOptInterface.ConstraintsInterpretation","text":"ConstraintsInterpretation <: MOI.AbstractOptimizerAttribute\n\nAttribute to define how POI.Optimizer should interpret constraints.\n\nPOI.ONLY_CONSTRAINTS: Only interpret ScalarAffineFunction constraints as linear constraints If an expression such as x >= p1 + p2 appears it will be trated like a new constraint. This is the default behaviour of POI.Optimizer\nPOI.ONLY_BOUNDS: Only interpret ScalarAffineFunction constraints as a variable bound. This is valid for constraints such as x >= p or x >= p1 + p2. 
If a constraint x1 + x2 >= p appears, which is not a valid variable bound it will throw an error.\nPOI.BOUNDS_AND_CONSTRAINTS: Interpret ScalarAffineFunction constraints as a variable bound if they are a valid variable bound, i.e., x >= p or x >= p1 + p2 and interpret them as linear constraints otherwise.\n\nExample\n\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_BOUNDS)\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_CONSTRAINTS)\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.BOUNDS_AND_CONSTRAINTS)\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.Optimizer","page":"Reference","title":"ParametricOptInterface.Optimizer","text":"Optimizer{T, OT <: MOI.ModelLike} <: MOI.AbstractOptimizer\n\nDeclares a Optimizer, which allows the handling of parameters in a optimization model.\n\nKeyword arguments\n\nevaluate_duals::Bool: If true, evaluates the dual of parameters. Users might want to set it to false to increase performance when the duals of parameters are not necessary. Defaults to true.\nsave_original_objective_and_constraints: If true saves the orginal function and set of the constraints as well as the original objective function inside POI.Optimizer. This is useful for printing the model but greatly increases the memory footprint. Users might want to set it to false to increase performance in applications where you don't need to query the original expressions provided to the model in constraints or in the objective. Note that this might break printing or queries such as MOI.get(model, MOI.ConstraintFunction(), c). 
Defaults to true.\n\nExample\n\njulia> ParametricOptInterface.Optimizer(GLPK.Optimizer())\nParametricOptInterface.Optimizer{Float64,GLPK.Optimizer}\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.ParameterDual","page":"Reference","title":"ParametricOptInterface.ParameterDual","text":"ParameterDual <: MOI.AbstractVariableAttribute\n\nAttribute defined to get the dual values associated to parameters\n\nExample\n\nMOI.get(model, POI.ParameterValue(), p)\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.ParameterValue","page":"Reference","title":"ParametricOptInterface.ParameterValue","text":"ParameterValue <: MOI.AbstractVariableAttribute\n\nAttribute defined to set and get parameter values\n\nExample\n\nMOI.set(model, POI.ParameterValue(), p, 2.0)\nMOI.get(model, POI.ParameterValue(), p)\n\n\n\n\n\n","category":"type"},{"location":"Examples/markowitz/#Markowitz-Efficient-Frontier","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"","category":"section"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"In this example, we solve the classical portfolio problem where we introduce the weight parameter gamma and maximize gamma text risk - textexpected return. 
By updating the values of gamma we trace the efficient frontier.","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Given the prices changes with mean mu and covariance Sigma, we can construct the classical portfolio problem:","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"beginarrayll\n textmaximize gamma* x^T mu - x^T Sigma x \n textsubject to x _1 = 1 \n x succeq 0\nendarray","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"The problem data was gotten from the example portfolio optimization","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"using ParametricOptInterface, MathOptInterface, JuMP, Ipopt\nusing LinearAlgebra, Plots\n\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\n\n# generate problem data\nμ = [11.5; 9.5; 6] / 100 #expected returns\nΣ = [\n 166 34 58 #covariance matrix\n 34 64 4\n 58 4 100\n] / 100^2\n","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"We first build the model with gamma as parameter in POI","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function first_model(μ,Σ)\n cached = MOI.Bridges.full_bridge_optimizer(\n MOIU.CachingOptimizer(\n MOIU.UniversalFallback(MOIU.Model{Float64}()),\n Ipopt.Optimizer(),\n ),\n Float64,\n )\n optimizer = POI.Optimizer(cached)\n portfolio = direct_model(optimizer)\n set_silent(portfolio)\n \n N = length(μ)\n @variable(portfolio, x[1:N] >= 0)\n @variable(portfolio, γ in MOI.Parameter(0.0))\n\n @objective(portfolio, Max, γ*dot(μ,x) - x' * Σ * x)\n 
@constraint(portfolio, sum(x) == 1)\n optimize!(portfolio)\n\n return portfolio\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Then, we update the gamma value in the model","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function update_model!(portfolio,γ_value)\n γ = portfolio[:γ]\n MOI.set(portfolio, POI.ParameterValue(), γ, γ_value)\n optimize!(portfolio)\n return portfolio\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Collecting all the return and risk resuls for each gamma","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function add_to_dict(portfolios_values,portfolio,μ,Σ)\n γ = portfolio[:γ]\n γ_value = value(γ)\n x = portfolio[:x]\n x_value = value.(x)\n portfolio_return = dot(μ,x_value)\n portfolio_deviation = x_value' * Σ * x_value\n portfolios_values[γ_value] = (portfolio_return,portfolio_deviation)\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Run the portfolio optimization for different values of gamma","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"portfolio = first_model(μ,Σ)\nportfolios_values = Dict()\n# Create a reference to the model to change it later\nportfolio_ref = [portfolio]\nadd_to_dict(portfolios_values,portfolio,μ,Σ)\n\nfor γ_value in 0.02:0.02:1.0\n portfolio_ref[] = update_model!(portfolio_ref[],γ_value)\n add_to_dict(portfolios_values,portfolio_ref[],μ,Σ)\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient 
Frontier","text":"Plot the efficient frontier","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"portfolios_values = sort(portfolios_values,by=x->x[1])\nportfolios_values_matrix = hcat([[v[1],v[2]] for v in values(portfolios_values)]...)'\nplot(portfolios_values_matrix[:,2],portfolios_values_matrix[:,1],legend=false,\nxlabel=\"Standard Deviation\", ylabel = \"Return\", title = \"Efficient Frontier\")","category":"page"},{"location":"#ParametricOptInterface.jl-Documentation","page":"Home","title":"ParametricOptInterface.jl Documentation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"ParametricOptInterface.jl (POI for short) is a package written on top of MathOptInterface.jl that allows users to add parameters to a MOI/JuMP problem explicitly.","category":"page"},{"location":"#Installation","page":"Home","title":"Installation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"To install the package you can use Pkg.add as follows:","category":"page"},{"location":"","page":"Home","title":"Home","text":"pkg> add ParametricOptInterface","category":"page"},{"location":"#Contributing","page":"Home","title":"Contributing","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"When contributing please note that the package follows the JuMP style guide.","category":"page"},{"location":"Examples/benders/#Benders-Quantile-Regression","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We will apply Norm-1 regression to the Linear Regression problem. Linear regression is a statistical tool to obtain the relation between one dependent variable and other explanatory variables. 
In other words, given a set of n explanatory variables X = X_1 dots X_n we would like to obtain the best possible estimate for Y. In order to accomplish such a task we make the hypothesis that Y is approximately linear function of X:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Y = sum_j =1^n beta_j X_j + varepsilon","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"where varepsilon is some random error.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The estimation of the beta values relies on observations of the variables: y^i x_1^i dots x_n^i_i.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"In this example we will solve a problem where the explanatory variables are sinusoids of differents frequencies. 
First, we define the number of explanatory variables and observations","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"using ParametricOptInterface,MathOptInterface,JuMP,HiGHS\nusing TimerOutputs,LinearAlgebra,Random\n\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\nconst OPTIMIZER = HiGHS.Optimizer;\n\nconst N_Candidates = 200\nconst N_Observations = 2000\nconst N_Nodes = 200\n\nconst Observations = 1:N_Observations\nconst Candidates = 1:N_Candidates\nconst Nodes = 1:N_Nodes;","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Initialize a random number generator to keep results deterministic","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"rng = Random.MersenneTwister(123);","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Building regressors (explanatory) sinusoids","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"const X = zeros(N_Candidates, N_Observations)\nconst time = [obs / N_Observations * 1 for obs in Observations]\nfor obs in Observations, cand in Candidates\n t = time[obs]\n f = cand\n X[cand, obs] = sin(2 * pi * f * t)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Define coefficients","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"β = zeros(N_Candidates)\nfor i in Candidates\n if rand(rng) <= (1 - i / N_Candidates)^2 && i <= 100\n β[i] = 4 * rand(rng) / i\n end\nend","category":"page"},{"location":"Examples/benders/","page":"Benders 
Quantile Regression","title":"Benders Quantile Regression","text":"Create noisy observations","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"const y = X' * β .+ 0.1 * randn(rng, N_Observations)","category":"page"},{"location":"Examples/benders/#Benders-Decomposition","page":"Benders Quantile Regression","title":"Benders Decomposition","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Benders decomposition is used to solve large optimization problems with some special characteristics. LP's can be solved with classical linear optimization methods such as the Simplex method or Interior point methods provided by solvers like HiGHS. However, these methods do not scale linearly with the problem size. In the Benders decomposition framework we break the problem in two pieces: A outer and a inner problem. Of course some variables will belong to both problems, this is where the cleverness of Benders kicks in: The outer problem is solved and passes the shared variables to the inner. The inner problem is solved with the shared variables FIXED to the values given by the outer problem. The solution of the inner problem can be used to generate a constraint to the outer problem to describe the linear approximation of the cost function of the shared variables. 
In many cases, like stochastic programming, the inner problems have a interesting structure and might be broken in smaller problem to be solved in parallel.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We will descibe the decomposition similarly to what is done in: Introduction to Linear Optimization, Bertsimas & Tsitsiklis (Chapter 6.5): Where the problem in question has the form","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x y_k c^T x + f_1^T y_1 + dots + f_n^T y_n notag \n textsubject to Ax = b notag \n B_1 x + D_1 y_1 = d_1 notag \n dots dots notag \n B_n x + D_n y_n = d_n notag \n x y_1 y_n geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/#Inner-Problem","page":"Benders Quantile Regression","title":"Inner Problem","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Given a solution for the x variables we can define the inner problem as","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n z_k(x) = min_y_k f_k^T y_k notag \n textsubject to D_k y_k = d_k - B_k x notag \n y_k geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The z_k(x) function represents the cost of the subproblem given a solution for x. 
This function is a convex function because x affects only the right hand side of the problem (this is a standard results in LP theory).","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"For the special case of the Norm-1 reggression the problem is written as:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\nz_k(beta) = min_varepsilon^up varepsilon^dw sum_i in ObsSet(k) varepsilon^up_i + varepsilon^dw_i notag \n textsubject to varepsilon^up_i geq + y_i - sum_j in Candidates beta_j x_ij forall i in ObsSet(k) notag \n varepsilon^dw_i geq - y_i + sum_j in Candidates beta_j x_ij forall i in ObsSet(k) notag \n varepsilon^up_i varepsilon^dw_i geq 0 forall i in ObsSet(k) notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The collection ObsSet(k) is a sub-set of the N_Observations. Any partition of the N_Observations collection is valid. 
In this example we will partition with the function:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function ObsSet(K)\n obs_per_block = div(N_Observations, N_Nodes)\n return (1+(K-1)*obs_per_block):(K*obs_per_block)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Which can be written in POI as follows:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function inner_model(K)\n\n # initialize the POI model\n inner = direct_model(POI.Optimizer(OPTIMIZER()))\n\n # Define local optimization variables for norm-1 error\n @variables(inner, begin\n ɛ_up[ObsSet(K)] >= 0\n ɛ_dw[ObsSet(K)] >= 0\n end)\n\n # create the regression coefficient representation\n # Create parameters\n β = [@variable(inner, set = MOI.Parameter(0.0)) for i in 1:N_Candidates]\n for (i, βi) in enumerate(β)\n set_name(βi, \"β[$i]\")\n end\n\n # create local constraints\n # Note that *parameter* algebra is implemented just like variables\n # algebra. 
We can multiply parameters by constants, add parameters,\n # sum parameters and variables and so on.\n @constraints(\n inner,\n begin\n ɛ_up_ctr[i in ObsSet(K)],\n ɛ_up[i] >= +sum(X[j, i] * β[j] for j in Candidates) - y[i]\n ɛ_dw_ctr[i in ObsSet(K)],\n ɛ_dw[i] >= -sum(X[j, i] * β[j] for j in Candidates) + y[i]\n end\n )\n\n # create local objective function\n @objective(inner, Min, sum(ɛ_up[i] + ɛ_dw[i] for i in ObsSet(K)))\n\n # return the correct group of parameters\n return (inner, β)\nend","category":"page"},{"location":"Examples/benders/#Outer-Problem","page":"Benders Quantile Regression","title":"Outer Problem","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now that all pieces of the original problem can be representad by the convex z_k(x) functions we can recast the problem in the the equivalent form:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x c^T x + z_1(x) + dots + z_n(x) notag \n textsubject to Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"However we cannot pass a problem in this form to a linear programming solver (it could be passed to other kinds of solvers).","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Another standart result of optimization theory is that a convex function can be represented by its supporting hyper-planes:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n z_k(x) = min_z x z notag \n textsubject to z geq pi_k(hatx) (x - hatx) + z_k(hatx) forall hatx in dom(z_k) notag 
\nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Then we can re-write (again) the outer problem as","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x z_k c^T x + z_1 + dots + z_n notag \n textsubject to z_i geq pi_i(hatx) (x - hatx) + z_i(hatx) forall hatx in dom(z_i) i in 1 dots n notag \n Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Which is a linear program! However, it has infinitely many constraints !!","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We can relax the infinite constraints and write:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x z_k c^T x + z_1 + dots + z_n notag \n textsubject to Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"But now its only an underestimated problem. 
In the case of our problem it can be written as:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_varepsilon beta sum_i in Nodes varepsilon_i notag \n textsubject to varepsilon_i geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"This model can be written in JuMP:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_model()\n outer = Model(OPTIMIZER)\n @variables(outer, begin\n ɛ[Nodes] >= 0\n β[1:N_Candidates]\n end)\n @objective(outer, Min, sum(ɛ[i] for i in Nodes))\n sol = zeros(N_Candidates)\n return (outer, ɛ, β, sol)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The method to solve the outer problem and query its solution is given here:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_solve(outer_model)\n model = outer_model[1]\n β = outer_model[3]\n optimize!(model)\n return (value.(β), objective_value(model))\nend","category":"page"},{"location":"Examples/benders/#Supporting-Hyperplanes","page":"Benders Quantile Regression","title":"Supporting Hyperplanes","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"With these building blocks in hand, we can start building the algorithm. 
So far we know how to:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Solve the relaxed outer problem\nObtain the solution for the hatx (or beta in our case)","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now we can:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Fix the values of hatx in the inner problems\nSolve the inner problems\nquery the solution of the inner problems to obtain the supporting hyperplane","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"the value of z_k(hatx), which is the objective value of the inner problem","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"and the derivative pi_k(hatx) = fracd z_k(x)d x Big_x = hatx The derivative is the dual variable associated to the variable hatx, which results by applying the chain rule on the constraints duals. 
These new steps are executed by the function:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function inner_solve(model, outer_solution)\n β0 = outer_solution[1]\n inner = model[1]\n\n # The first step is to fix the values given by the outer problem\n @timeit \"fix\" begin\n β = model[2]\n MOI.set.(inner, POI.ParameterValue(), β, β0)\n end\n\n # here the inner problem is solved\n @timeit \"opt\" optimize!(inner)\n\n # query dual variables, which are sensitivities\n # They represent the subgradient (almost a derivative)\n # of the objective function for infinitesimal variations\n # of the constants in the linear constraints\n # POI: we can query dual values of *parameters*\n π = MOI.get.(inner, POI.ParameterDual(), β)\n\n # π2 = shadow_price.(β_fix)\n obj = objective_value(inner)\n rhs = obj - dot(π, β0)\n return (rhs, π, obj)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now that we have cutting plane in hand we can add them to the outer problem","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_add_cut(outer_model, cut_info, node)\n outer = outer_model[1]\n ɛ = outer_model[2]\n β = outer_model[3]\n\n rhs = cut_info[1]\n π = cut_info[2]\n\n @constraint(outer, ɛ[node] >= sum(π[j] * β[j] for j in Candidates) + rhs)\nend","category":"page"},{"location":"Examples/benders/#Algorithm-wrap-up","page":"Benders Quantile Regression","title":"Algorithm wrap up","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The complete algorithm is","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Solve the relaxed master 
problem\nObtain the solution for the hatx (or beta in our case)\nFix the values of hatx in the slave problems\nSolve the slave problem\nquery the solution of the slave problem to obtain the supporting hyperplane\nadd hyperplane to master problem\nrepeat","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now we grab all the pieces that we built and we write the benders algorithm by calling the above function in a proper order.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The macros @timeit are use to time each step of the algorithm.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function decomposed_model(;print_timer_outputs::Bool = true)\n reset_timer!() # reset timer fo comparision\n time_init = @elapsed @timeit \"Init\" begin\n # Create the outer problem with no cuts\n @timeit \"outer\" outer = outer_model()\n\n # initialize solution for the regression coefficients in zero\n @timeit \"Sol\" solution = (zeros(N_Candidates), Inf)\n best_sol = deepcopy(solution)\n\n # Create the inner problems\n @timeit \"inners\" inners =\n [inner_model(i) for i in Candidates]\n\n # Save initial version of the inner problems and create\n # the first set of cuts\n @timeit \"Cuts\" cuts =\n [inner_solve(inners[i], solution) for i in Candidates]\n end\n\n UB = +Inf\n LB = -Inf\n\n # println(\"Initialize Iterative step\")\n time_loop = @elapsed @timeit \"Loop\" for k in 1:80\n\n # Add cuts generated from each inner problem to the outer problem\n @timeit \"add cuts\" for i in Candidates\n outer_add_cut(outer, cuts[i], i)\n end\n\n # Solve the outer problem with the new set of cuts\n # Obtain new solution candidate for the regression coefficients\n @timeit \"solve outer\" solution = outer_solve( outer)\n\n # Pass the new candidate 
solution to each of the inner problems\n # Solve the inner problems and obtain cutting planes\n @timeit \"solve nodes\" for i in Candidates\n cuts[i] = inner_solve( inners[i], solution)\n end\n\n LB = solution[2]\n new_UB = sum(cuts[i][3] for i in Candidates)\n if new_UB <= UB\n best_sol = deepcopy(solution)\n end\n UB = min(UB, new_UB)\n\n if abs(UB - LB) / (abs(UB) + abs(LB)) < 0.05\n break\n end\n end\n\n print_timer_outputs && print_timer()\n\n return best_sol[1]\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Run benders decomposition with POI","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"β2 = decomposed_model(; print_timer_outputs = false);\nGC.gc()\nβ2 = decomposed_model();","category":"page"}] +[{"location":"manual/#Manual","page":"Manual","title":"Manual","text":"","category":"section"},{"location":"manual/#Why-use-parameters?","page":"Manual","title":"Why use parameters?","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"A typical optimization model built using MathOptInterface.jl (MOIfor short) has two main components:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"Variables\nConstants","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"Using these basic elements, one can create functions and sets that, together, form the desired optimization model. The goal of POI is the implementation of a third type, parameters, which","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"are declared similar to a variable, and inherits some functionalities (e.g. 
dual calculation)\nacts like a constant, in the sense that it has a fixed value that will remain the same unless explicitely changed by the user","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"A main concern is to efficiently implement this new type, as one typical usage is to change its value to analyze the model behavior, without the need to build a new one from scratch.","category":"page"},{"location":"manual/#How-it-works","page":"Manual","title":"How it works","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"The main idea applied in POI is that the interaction between the solver, e.g. GLPK, and the optimization model will be handled by MOI as usual. Because of that, POI is a higher level wrapper around MOI, responsible for receiving variables, constants and parameters, and forwarding to the lower level model only variables and constants.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"As POI receives parameters, it must analyze and decide how they should be handled on the lower level optimization model (the MOI model).","category":"page"},{"location":"manual/#Usage","page":"Manual","title":"Usage","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"In this manual we describe how to interact with the optimization model at the MOI level. In the Examples section you can find some tutorials with the JuMP usage.","category":"page"},{"location":"manual/#Supported-constraints","page":"Manual","title":"Supported constraints","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"This is a list of supported MOI constraint functions that can handle parameters. 
If you try to add a parameter to a function that is not listed here, it will return an unsupported error.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI Function\nScalarAffineFunction\nScalarQuadraticFunction\nVectorAffineFunction","category":"page"},{"location":"manual/#Supported-objective-functions","page":"Manual","title":"Supported objective functions","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI Function\nScalarAffineFunction\nScalarQuadraticFunction","category":"page"},{"location":"manual/#Declare-a-Optimizer","page":"Manual","title":"Declare a Optimizer","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"In order to use parameters, the user needs to declare a ParametricOptInterface.Optimizer on top of a MOI optimizer, such as HiGHS.Optimizer().","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"using ParametricOptInterface, MathOptInterface, HiGHS\n# Rename ParametricOptInterface and MathOptInterface to simplify the code\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\n# Define a Optimizer on top of the MOI optimizer\noptimizer = POI.Optimizer(HiGHS.Optimizer())","category":"page"},{"location":"manual/#Parameters","page":"Manual","title":"Parameters","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"A MOI.Parameter is a set used to define a variable with a fixed value that can be changed by the user. It is analogous to MOI.EqualTo, but can be used by special methods like the ones in this package to remove the fixed variable from the optimization problem. 
This permits the usage of multiplicative parameters in lienar models and might speedup solves since the number of variables is reduced.","category":"page"},{"location":"manual/#Adding-a-new-parameter-to-a-model","page":"Manual","title":"Adding a new parameter to a model","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"To add a parameter to a model, we must use the MOI.add_constrained_variable() function, passing as its arguments the model and a MOI.Parameter with its given value:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"y, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))","category":"page"},{"location":"manual/#Changing-the-parameter-value","page":"Manual","title":"Changing the parameter value","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"To change a given parameter's value, access its VariableIndex and set it to the new value using the MOI.Parameter structure.","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI.set(optimizer, POI.ParameterValue(), y, MOI.Parameter(2.0))","category":"page"},{"location":"manual/#Retrieving-the-dual-of-a-parameter","page":"Manual","title":"Retrieving the dual of a parameter","text":"","category":"section"},{"location":"manual/","page":"Manual","title":"Manual","text":"Given an optimized model, one can compute the dual associated to a parameter, as long as it is an additive term in the constraints or objective. 
One can do so by getting the MOI.ConstraintDual attribute of the parameter's MOI.ConstraintIndex:","category":"page"},{"location":"manual/","page":"Manual","title":"Manual","text":"MOI.get(optimizer, POI.ParameterDual(), y)","category":"page"},{"location":"Examples/example/#Basic-Examples","page":"Basic Examples","title":"Basic Examples","text":"","category":"section"},{"location":"Examples/example/#MOI-example-step-by-step-usage","page":"Basic Examples","title":"MOI example - step by step usage","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's write a step-by-step example of POI usage at the MOI level.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"First, we declare a ParametricOptInterface.Optimizer on top of a MOI optimizer. In the example, we consider HiGHS as the underlying solver:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing MathOptInterface\nusing ParametricOptInterface\n\nconst MOI = MathOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(HiGHS.Optimizer())","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We declare the variable x as in a typical MOI model, and we add a non-negativity constraint:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"x = MOI.add_variables(optimizer, 2)\nfor x_i in x\n MOI.add_constraint(optimizer, x_i, MOI.GreaterThan(0.0))\nend","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now, let's consider 3 MOI.Parameter. Two of them, y, z, will be placed in the constraints and one, w, in the objective function. 
We'll start all three of them with a value equal to 0:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"w, cw = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))\ny, cy = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))\nz, cz = MOI.add_constrained_variable(optimizer, MOI.Parameter(0.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's add the constraints. Notice that we treat parameters and variables in the same way when building the functions that will be placed in some set to create a constraint (Function-in-Set):","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"cons1 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([2.0, 1.0, 3.0], [x[1], x[2], y]), 0.0)\nci1 = MOI.add_constraint(optimizer, cons1, MOI.LessThan(4.0))\ncons2 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, 2.0, 0.5], [x[1], x[2], z]), 0.0)\nci2 = MOI.add_constraint(optimizer, cons2, MOI.LessThan(4.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Finally, we declare and add the objective function, with its respective sense:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"obj_func = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([4.0, 3.0, 2.0], [x[1], x[2], w]), 0.0)\nMOI.set(optimizer, MOI.ObjectiveFunction{MOI.ScalarAffineFunction{Float64}}(), obj_func)\nMOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now we can optimize the model and assess its termination and primal status:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(optimizer)\nMOI.get(optimizer, 
MOI.TerminationStatus())\nMOI.get(optimizer, MOI.PrimalStatus())","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Given the optimized solution, we check that its value is, as expected, equal to 28/3, and the solution vector x is [4/3, 4/3]:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(MOI.get(optimizer, MOI.ObjectiveValue()), 28/3, atol = 1e-4)\nisapprox(MOI.get(optimizer, MOI.VariablePrimal(), x[1]), 4/3, atol = 1e-4)\nisapprox(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 4/3, atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can also retrieve the dual values associated to each parameter, as they are all additive:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(optimizer, MOI.ConstraintDual(), cy)\nMOI.get(optimizer, MOI.ConstraintDual(), cz)\nMOI.get(optimizer, MOI.ConstraintDual(), cw)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Notice the direct relationship in this case between the parameters' duals and the associated constraints' duals. The y parameter, for example, only appears in the cons1. If we compare their duals, we can check that the dual of y is equal to its coefficient in cons1 multiplied by the constraint's dual itself, as expected:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(MOI.get(optimizer, MOI.ConstraintDual(), cy), 3*MOI.get(optimizer, MOI.ConstraintDual(), ci1), atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The same is valid for the remaining parameters. 
In case a parameter appears in more than one constraint, or both some constraints and in the objective function, its dual will be equal to the linear combination of the functions' duals multiplied by the respective coefficients.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"So far, we only added some parameters that had no influence at first in solving the model. Let's change the values associated to each parameter to assess its implications. First, we set the value of parameters y and z to 1.0. Notice that we are changing the feasible set of the decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.ParameterValue(), y, 1.0)\nMOI.set(optimizer, POI.ParameterValue(), z, 1.0)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"However, if we check the optimized model now, there will be no changes in the objective function value or the in the optimized decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 28/3, atol = 1e-4)\nisapprox.(MOI.get(optimizer, MOI.VariablePrimal(), x[1]), 4/3, atol = 1e-4)\nisapprox.(MOI.get(optimizer, MOI.VariablePrimal(), x[2]), 4/3, atol = 1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Although we changed the parameter values, we didn't optimize the model yet. 
Thus, to apply the parameters' changes, the model must be optimized again:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(optimizer)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The MOI.optimize!() function handles the necessary updates, properly fowarding the new outer model (POI model) additions to the inner model (MOI model) which will be handled by the solver. Now we can assess the updated optimized information:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 3.0, atol = 1e-4)\nMOI.get.(optimizer, MOI.VariablePrimal(), x) == [0.0, 1.0]","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If we update the parameter w, associated to the objective function, we are simply adding a constant to it. Notice how the new objective function is precisely equal to the previous one plus the new value of w. 
In addition, as we didn't update the feasible set, the optimized decision variables remain the same.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.ParameterValue(), w, 2.0)\n# Once again, the model must be optimized to incorporate the changes\nMOI.optimize!(optimizer)\n# Only the objective function value changes\nisapprox.(MOI.get(optimizer, MOI.ObjectiveValue()), 7.0, atol = 1e-4)\nMOI.get.(optimizer, MOI.VariablePrimal(), x) == [0.0, 1.0]","category":"page"},{"location":"Examples/example/#JuMP-Example-step-by-step-usage","page":"Basic Examples","title":"JuMP Example - step by step usage","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's write a step-by-step example of POI usage at the JuMP level.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"First, we declare a Model on top of a Optimizer of an underlying solver. In the example, we consider HiGHS as the underlying solver:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\n\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = Model(() -> ParametricOptInterface.Optimizer(HiGHS.Optimizer()))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We declare the variable x as in a typical JuMP model:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, x[i = 1:2] >= 0)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Now, let's consider 3 MOI.Parameter. Two of them, y, z, will be placed in the constraints and one, w, in the objective function. 
We'll start all three of them with a value equal to 0:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@variable(model, y in MOI.Parameter(0.0))\n@variable(model, z in MOI.Parameter(0.0))\n@variable(model, w in MOI.Parameter(0.0))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's add the constraints. Notice that we treat parameters the same way we treat variables when writing the model:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@constraint(model, c1, 2x[1] + x[2] + 3y <= 4)\n@constraint(model, c2, x[1] + 2x[2] + 0.5z <= 4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Finally, we declare and add the objective function, with its respective sense:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"@objective(model, Max, 4x[1] + 3x[2] + 2w)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can optimize the model and assess its termination and primal status:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)\ntermination_status(model)\nprimal_status(model)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Given the optimized solution, we check that its value is, as expected, equal to 28/3, and the solution vector x is [4/3, 4/3]:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(objective_value(model), 28/3)\nisapprox(value.(x), [4/3, 4/3])","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We can also retrieve the dual values associated to each 
parameter, as they are all additive:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(model, POI.ParameterDual(), y)\nMOI.get(model, POI.ParameterDual(), z)\nMOI.get(model, POI.ParameterDual(), w)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Notice the direct relationship in this case between the parameters' duals and the associated constraints' duals. The y parameter, for example, only appears in the c1. If we compare their duals, we can check that the dual of y is equal to its coefficient in c1 multiplied by the constraint's dual itself, as expected:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"dual_of_y = MOI.get(model, POI.ParameterDual(), y)\nisapprox(dual_of_y, 3 * dual(c1))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The same is valid for the remaining parameters. In case a parameter appears in more than one constraint, or both some constraints and in the objective function, its dual will be equal to the linear combination of the functions' duals multiplied by the respective coefficients.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"So far, we only added some parameters that had no influence at first in solving the model. Let's change the values associated to each parameter to assess its implications. First, we set the value of parameters y and z to 1.0. 
Notice that we are changing the feasible set of the decision variables:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), y, 1)\nMOI.set(model, POI.ParameterValue(), z, 1)\n# We can also query the value in the parameters\nMOI.get(model, POI.ParameterValue(), y)\nMOI.get(model, POI.ParameterValue(), z)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To apply the parameters' changes, the model must be optimized again:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"The optimize! function handles the necessary updates, properly fowarding the new outer model (POI model) additions to the inner model (MOI model) which will be handled by the solver. Now we can assess the updated optimized information:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"isapprox(objective_value(model), 3)\nisapprox(value.(x), [0, 1])","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If we update the parameter w, associated to the objective function, we are simply adding a constant to it. Notice how the new objective function is precisely equal to the previous one plus the new value of w. 
In addition, as we didn't update the feasible set, the optimized decision variables remain the same.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), w, 2)\n# Once again, the model must be optimized to incorporate the changes\noptimize!(model)\n# Only the objective function value changes\nisapprox(objective_value(model), 7)\nisapprox(value.(x), [0, 1])","category":"page"},{"location":"Examples/example/#JuMP-Example-Declaring-vectors-of-parameters","page":"Basic Examples","title":"JuMP Example - Declaring vectors of parameters","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Many times it is useful to declare a vector of parameters just like we declare a vector of variables, the JuMP syntax for variables works with parameters too:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = Model(() -> ParametricOptInterface.Optimizer(HiGHS.Optimizer()))\n@variable(model, x[i = 1:3] >= 0)\n@variable(model, p1[i = 1:3] in MOI.Parameter(0.0))\n@variable(model, p2[i = 1:3] in MOI.Parameter.([1, 10, 45]))\n@variable(model, p3[i = 1:3] in MOI.Parameter.(ones(3)))","category":"page"},{"location":"Examples/example/#JuMP-Example-Dealing-with-parametric-expressions-as-variable-bounds","page":"Basic Examples","title":"JuMP Example - Dealing with parametric expressions as variable bounds","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"A very common pattern that appears when using ParametricOptInterface is to add variable and later add some expression with parameters that represent the variable bound. 
The following code illustrates the pattern:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = direct_model(POI.Optimizer(HiGHS.Optimizer()))\n@variable(model, x)\n@variable(model, p in MOI.Parameter(0.0))\n@constraint(model, x >= p)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Since parameters are treated like variables JuMP lowers this to MOI as x - p >= 0 which is not a variable bound but a linear constraint.This means that the current representation of this problem at the solver level is:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"beginalign\n min_x 0\n \n textst x in mathbbR \n x - p geq 0\nendalign","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This behaviour might be undesirable because it creates extra rows in your problem. Users can set the ParametricOptInterface.ConstraintsInterpretation to control how the linear constraints should be interpreted. 
The pattern advised for users seeking the most performance out of ParametricOptInterface should use the followig pattern:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using HiGHS\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\nmodel = direct_model(POI.Optimizer(HiGHS.Optimizer()))\n@variable(model, x)\n@variable(model, p in MOI.Parameter(0.0))\n\n# Indicate that all the new constraints will be valid variable bounds\nMOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_BOUNDS)\n@constraint(model, x >= p)\n# The name of this constraint was different to inform users that this is a\n# variable bound.\n\n# Indicate that all the new constraints will not be variable bounds\nMOI.set(model, POI.ConstraintsInterpretation(), POI.ONLY_CONSTRAINTS)\n# @constraint(model, ...)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This way the mathematical representation of the problem will be:","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"beginalign\n min_x 0\n \n textst x geq p\nendalign","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"which might lead to faster solves.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Users that just want everything to work can use the default value POI.ONLY_CONSTRAINTS or try to use POI.BOUNDS_AND_CONSTRAINTS and leave it to ParametricOptInterface to interpret the constraints as bounds when applicable and linear constraints otherwise.","category":"page"},{"location":"Examples/example/#MOI-Example-Parameters-multiplying-Quadratic-terms","page":"Basic Examples","title":"MOI Example - Parameters multiplying Quadratic terms","text":"","category":"section"},{"location":"Examples/example/","page":"Basic 
Examples","title":"Basic Examples","text":"Let's start with a simple quadratic problem","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using Ipopt\nusing MathOptInterface\nusing ParametricOptInterface\n\nconst MOI = MathOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(Ipopt.Optimizer())\n\nx = MOI.add_variable(optimizer)\ny = MOI.add_variable(optimizer)\nMOI.add_constraint(optimizer, x, MOI.GreaterThan(0.0))\nMOI.add_constraint(optimizer, y, MOI.GreaterThan(0.0))\n\ncons1 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([2.0, 1.0], [x, y]), 0.0)\nci1 = MOI.add_constraint(optimizer, cons1, MOI.LessThan(4.0))\ncons2 = MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, 2.0], [x, y]), 0.0)\nci2 = MOI.add_constraint(optimizer, cons2, MOI.LessThan(4.0))\n\nMOI.set(optimizer, MOI.ObjectiveSense(), MOI.MAX_SENSE)\nobj_func = MOI.ScalarQuadraticFunction(\n [MOI.ScalarQuadraticTerm(1.0, x, x)\n MOI.ScalarQuadraticTerm(1.0, y, y)],\n MOI.ScalarAffineTerm{Float64}[],\n 0.0,\n)\nMOI.set(\n optimizer,\n MOI.ObjectiveFunction{MOI.ScalarQuadraticFunction{Float64}}(),\n obj_func,\n)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To multiply a parameter in a quadratic term, the user will need to use the POI.QuadraticObjectiveCoef model attribute.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"p = first(MOI.add_constrained_variable.(optimizer, MOI.Parameter(1.0)))\nMOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), p)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This function will add the term p*xy to the objective function. 
It's also possible to multiply a scalar affine function to the quadratic term.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(optimizer, POI.QuadraticObjectiveCoef(), (x,y), 2p+3)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"This will set the term (2p+3)*xy to the objective function (it overwrites the last set). Then, just optimize the model.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.optimize!(model)\nisapprox(MOI.get(model, MOI.ObjectiveValue()), 32/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), x), 4/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To change the parameter just set POI.ParameterValue and optimize again.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), p, 2.0)\nMOI.optimize!(model)\nisapprox(MOI.get(model, MOI.ObjectiveValue()), 128/9, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), x), 4/3, atol=1e-4)\nisapprox(MOI.get(model, MOI.VariablePrimal(), y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/#JuMP-Example-Parameters-multiplying-Quadratic-terms","page":"Basic Examples","title":"JuMP Example - Parameters multiplying Quadratic terms","text":"","category":"section"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Let's get the same MOI example","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"using Ipopt\nusing JuMP\nusing ParametricOptInterface\nconst POI = ParametricOptInterface\n\noptimizer = POI.Optimizer(Ipopt.Optimizer())\nmodel = 
direct_model(optimizer)\n\n@variable(model, x >= 0)\n@variable(model, y >= 0)\n@variable(model, p in MOI.Parameter(1.0))\n@constraint(model, 2x + y <= 4)\n@constraint(model, x + 2y <= 4)\n@objective(model, Max, (x^2 + y^2)/2)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"We use the same MOI function to add the parameter multiplied to the quadratic term.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)), 2index(p)+3)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"If the user print the model, the term (2p+3)*xy won't show. It's possible to retrieve the parametric function multiplying the term xy with MOI.get.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.get(backend(model), POI.QuadraticObjectiveCoef(), (index(x),index(y)))","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"Then, just optimize the model","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"optimize!(model)\nisapprox(objective_value(model), 32/3, atol=1e-4)\nisapprox(value(x), 4/3, atol=1e-4)\nisapprox(value(y), 4/3, atol=1e-4)","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"To change the parameter just set POI.ParameterValue and optimize again.","category":"page"},{"location":"Examples/example/","page":"Basic Examples","title":"Basic Examples","text":"MOI.set(model, POI.ParameterValue(), p, 2.0)\noptimize!(model)\nisapprox(objective_value(model), 128/9, atol=1e-4)\nisapprox(value(x), 4/3, atol=1e-4)\nisapprox(value(y), 4/3, 
atol=1e-4)","category":"page"},{"location":"reference/#Reference","page":"Reference","title":"Reference","text":"","category":"section"},{"location":"reference/","page":"Reference","title":"Reference","text":"ParametricOptInterface.ConstraintsInterpretation\nParametricOptInterface.Optimizer\nParametricOptInterface.ParameterDual\nParametricOptInterface.ParameterValue","category":"page"},{"location":"reference/#ParametricOptInterface.ConstraintsInterpretation","page":"Reference","title":"ParametricOptInterface.ConstraintsInterpretation","text":"ConstraintsInterpretation <: MOI.AbstractOptimizerAttribute\n\nAttribute to define how POI.Optimizer should interpret constraints.\n\nPOI.ONLY_CONSTRAINTS: Only interpret ScalarAffineFunction constraints as linear constraints If an expression such as x >= p1 + p2 appears it will be trated like a new constraint. This is the default behaviour of POI.Optimizer\nPOI.ONLY_BOUNDS: Only interpret ScalarAffineFunction constraints as a variable bound. This is valid for constraints such as x >= p or x >= p1 + p2. If a constraint x1 + x2 >= p appears, which is not a valid variable bound it will throw an error.\nPOI.BOUNDS_AND_CONSTRAINTS: Interpret ScalarAffineFunction constraints as a variable bound if they are a valid variable bound, i.e., x >= p or x >= p1 + p2 and interpret them as linear constraints otherwise.\n\nExample\n\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_BOUNDS)\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.ONLY_CONSTRAINTS)\nMOI.set(model, POI.InterpretConstraintsAsBounds(), POI.BOUNDS_AND_CONSTRAINTS)\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.Optimizer","page":"Reference","title":"ParametricOptInterface.Optimizer","text":"Optimizer{T, OT <: MOI.ModelLike} <: MOI.AbstractOptimizer\n\nDeclares a Optimizer, which allows the handling of parameters in a optimization model.\n\nKeyword arguments\n\nevaluate_duals::Bool: If true, evaluates the dual of parameters. 
Users might want to set it to false to increase performance when the duals of parameters are not necessary. Defaults to true.\nsave_original_objective_and_constraints: If true saves the orginal function and set of the constraints as well as the original objective function inside POI.Optimizer. This is useful for printing the model but greatly increases the memory footprint. Users might want to set it to false to increase performance in applications where you don't need to query the original expressions provided to the model in constraints or in the objective. Note that this might break printing or queries such as MOI.get(model, MOI.ConstraintFunction(), c). Defaults to true.\n\nExample\n\njulia> ParametricOptInterface.Optimizer(GLPK.Optimizer())\nParametricOptInterface.Optimizer{Float64,GLPK.Optimizer}\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.ParameterDual","page":"Reference","title":"ParametricOptInterface.ParameterDual","text":"ParameterDual <: MOI.AbstractVariableAttribute\n\nAttribute defined to get the dual values associated to parameters\n\nExample\n\nMOI.get(model, POI.ParameterValue(), p)\n\n\n\n\n\n","category":"type"},{"location":"reference/#ParametricOptInterface.ParameterValue","page":"Reference","title":"ParametricOptInterface.ParameterValue","text":"ParameterValue <: MOI.AbstractVariableAttribute\n\nAttribute defined to set and get parameter values\n\nExample\n\nMOI.set(model, POI.ParameterValue(), p, 2.0)\nMOI.get(model, POI.ParameterValue(), p)\n\n\n\n\n\n","category":"type"},{"location":"Examples/markowitz/#Markowitz-Efficient-Frontier","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"","category":"section"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"In this example, we solve the classical portfolio problem where we introduce the weight parameter gamma and maximize gamma text risk - textexpected 
return. By updating the values of gamma we trace the efficient frontier.","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Given the prices changes with mean mu and covariance Sigma, we can construct the classical portfolio problem:","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"beginarrayll\n textmaximize gamma* x^T mu - x^T Sigma x \n textsubject to x _1 = 1 \n x succeq 0\nendarray","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"The problem data was gotten from the example portfolio optimization","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"using ParametricOptInterface, MathOptInterface, JuMP, Ipopt\nusing LinearAlgebra, Plots\n\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\n\n# generate problem data\nμ = [11.5; 9.5; 6] / 100 #expected returns\nΣ = [\n 166 34 58 #covariance matrix\n 34 64 4\n 58 4 100\n] / 100^2\n","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"We first build the model with gamma as parameter in POI","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function first_model(μ,Σ)\n cached = MOI.Bridges.full_bridge_optimizer(\n MOIU.CachingOptimizer(\n MOIU.UniversalFallback(MOIU.Model{Float64}()),\n Ipopt.Optimizer(),\n ),\n Float64,\n )\n optimizer = POI.Optimizer(cached)\n portfolio = direct_model(optimizer)\n set_silent(portfolio)\n \n N = length(μ)\n @variable(portfolio, x[1:N] >= 0)\n @variable(portfolio, γ in MOI.Parameter(0.0))\n\n @objective(portfolio, Max, γ*dot(μ,x) - x' * Σ * x)\n 
@constraint(portfolio, sum(x) == 1)\n optimize!(portfolio)\n\n return portfolio\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Then, we update the gamma value in the model","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function update_model!(portfolio,γ_value)\n γ = portfolio[:γ]\n MOI.set(portfolio, POI.ParameterValue(), γ, γ_value)\n optimize!(portfolio)\n return portfolio\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Collecting all the return and risk resuls for each gamma","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"function add_to_dict(portfolios_values,portfolio,μ,Σ)\n γ = portfolio[:γ]\n γ_value = value(γ)\n x = portfolio[:x]\n x_value = value.(x)\n portfolio_return = dot(μ,x_value)\n portfolio_deviation = x_value' * Σ * x_value\n portfolios_values[γ_value] = (portfolio_return,portfolio_deviation)\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"Run the portfolio optimization for different values of gamma","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"portfolio = first_model(μ,Σ)\nportfolios_values = Dict()\n# Create a reference to the model to change it later\nportfolio_ref = [portfolio]\nadd_to_dict(portfolios_values,portfolio,μ,Σ)\n\nfor γ_value in 0.02:0.02:1.0\n portfolio_ref[] = update_model!(portfolio_ref[],γ_value)\n add_to_dict(portfolios_values,portfolio_ref[],μ,Σ)\nend","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient 
Frontier","text":"Plot the efficient frontier","category":"page"},{"location":"Examples/markowitz/","page":"Markowitz Efficient Frontier","title":"Markowitz Efficient Frontier","text":"portfolios_values = sort(portfolios_values,by=x->x[1])\nportfolios_values_matrix = hcat([[v[1],v[2]] for v in values(portfolios_values)]...)'\nplot(portfolios_values_matrix[:,2],portfolios_values_matrix[:,1],legend=false,\nxlabel=\"Standard Deviation\", ylabel = \"Return\", title = \"Efficient Frontier\")","category":"page"},{"location":"#ParametricOptInterface.jl-Documentation","page":"Home","title":"ParametricOptInterface.jl Documentation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"ParametricOptInterface.jl (POI for short) is a package written on top of MathOptInterface.jl that allows users to add parameters to a MOI/JuMP problem explicitly.","category":"page"},{"location":"#Installation","page":"Home","title":"Installation","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"To install the package you can use Pkg.add as follows:","category":"page"},{"location":"","page":"Home","title":"Home","text":"pkg> add ParametricOptInterface","category":"page"},{"location":"#Contributing","page":"Home","title":"Contributing","text":"","category":"section"},{"location":"","page":"Home","title":"Home","text":"When contributing please note that the package follows the JuMP style guide.","category":"page"},{"location":"Examples/benders/#Benders-Quantile-Regression","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We will apply Norm-1 regression to the Linear Regression problem. Linear regression is a statistical tool to obtain the relation between one dependent variable and other explanatory variables. 
In other words, given a set of n explanatory variables X = X_1 dots X_n we would like to obtain the best possible estimate for Y. In order to accomplish such a task we make the hypothesis that Y is approximately a linear function of X:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Y = sum_j =1^n beta_j X_j + varepsilon","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"where varepsilon is some random error.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The estimation of the beta values relies on observations of the variables: y^i x_1^i dots x_n^i_i.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"In this example we will solve a problem where the explanatory variables are sinusoids of different frequencies. 
First, we define the number of explanatory variables and observations","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"using ParametricOptInterface,MathOptInterface,JuMP,HiGHS\nusing TimerOutputs,LinearAlgebra,Random\n\nconst POI = ParametricOptInterface\nconst MOI = MathOptInterface\nconst OPTIMIZER = HiGHS.Optimizer;\n\nconst N_Candidates = 200\nconst N_Observations = 2000\nconst N_Nodes = 200\n\nconst Observations = 1:N_Observations\nconst Candidates = 1:N_Candidates\nconst Nodes = 1:N_Nodes;","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Initialize a random number generator to keep results deterministic","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"rng = Random.MersenneTwister(123);","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Building regressors (explanatory) sinusoids","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"const X = zeros(N_Candidates, N_Observations)\nconst time = [obs / N_Observations * 1 for obs in Observations]\nfor obs in Observations, cand in Candidates\n t = time[obs]\n f = cand\n X[cand, obs] = sin(2 * pi * f * t)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Define coefficients","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"β = zeros(N_Candidates)\nfor i in Candidates\n if rand(rng) <= (1 - i / N_Candidates)^2 && i <= 100\n β[i] = 4 * rand(rng) / i\n end\nend","category":"page"},{"location":"Examples/benders/","page":"Benders 
Quantile Regression","title":"Benders Quantile Regression","text":"Create noisy observations","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"const y = X' * β .+ 0.1 * randn(rng, N_Observations)","category":"page"},{"location":"Examples/benders/#Benders-Decomposition","page":"Benders Quantile Regression","title":"Benders Decomposition","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Benders decomposition is used to solve large optimization problems with some special characteristics. LP's can be solved with classical linear optimization methods such as the Simplex method or Interior point methods provided by solvers like HiGHS. However, these methods do not scale linearly with the problem size. In the Benders decomposition framework we break the problem in two pieces: An outer and an inner problem. Of course some variables will belong to both problems, this is where the cleverness of Benders kicks in: The outer problem is solved and passes the shared variables to the inner. The inner problem is solved with the shared variables FIXED to the values given by the outer problem. The solution of the inner problem can be used to generate a constraint to the outer problem to describe the linear approximation of the cost function of the shared variables. 
In many cases, like stochastic programming, the inner problems have a interesting structure and might be broken in smaller problem to be solved in parallel.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We will descibe the decomposition similarly to what is done in: Introduction to Linear Optimization, Bertsimas & Tsitsiklis (Chapter 6.5): Where the problem in question has the form","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x y_k c^T x + f_1^T y_1 + dots + f_n^T y_n notag \n textsubject to Ax = b notag \n B_1 x + D_1 y_1 = d_1 notag \n dots dots notag \n B_n x + D_n y_n = d_n notag \n x y_1 y_n geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/#Inner-Problem","page":"Benders Quantile Regression","title":"Inner Problem","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Given a solution for the x variables we can define the inner problem as","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n z_k(x) = min_y_k f_k^T y_k notag \n textsubject to D_k y_k = d_k - B_k x notag \n y_k geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The z_k(x) function represents the cost of the subproblem given a solution for x. 
This function is a convex function because x affects only the right hand side of the problem (this is a standard result in LP theory).","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"For the special case of the Norm-1 regression the problem is written as:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\nz_k(beta) = min_varepsilon^up varepsilon^dw sum_i in ObsSet(k) varepsilon^up_i + varepsilon^dw_i notag \n textsubject to varepsilon^up_i geq + y_i - sum_j in Candidates beta_j x_ij forall i in ObsSet(k) notag \n varepsilon^dw_i geq - y_i + sum_j in Candidates beta_j x_ij forall i in ObsSet(k) notag \n varepsilon^up_i varepsilon^dw_i geq 0 forall i in ObsSet(k) notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The collection ObsSet(k) is a sub-set of the N_Observations. Any partition of the N_Observations collection is valid. 
In this example we will partition with the function:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function ObsSet(K)\n obs_per_block = div(N_Observations, N_Nodes)\n return (1+(K-1)*obs_per_block):(K*obs_per_block)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Which can be written in POI as follows:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function inner_model(K)\n\n # initialize the POI model\n inner = direct_model(POI.Optimizer(OPTIMIZER()))\n\n # Define local optimization variables for norm-1 error\n @variables(inner, begin\n ɛ_up[ObsSet(K)] >= 0\n ɛ_dw[ObsSet(K)] >= 0\n end)\n\n # create the regression coefficient representation\n # Create parameters\n β = [@variable(inner, set = MOI.Parameter(0.0)) for i in 1:N_Candidates]\n for (i, βi) in enumerate(β)\n set_name(βi, \"β[$i]\")\n end\n\n # create local constraints\n # Note that *parameter* algebra is implemented just like variables\n # algebra. 
We can multiply parameters by constants, add parameters,\n # sum parameters and variables and so on.\n @constraints(\n inner,\n begin\n ɛ_up_ctr[i in ObsSet(K)],\n ɛ_up[i] >= +sum(X[j, i] * β[j] for j in Candidates) - y[i]\n ɛ_dw_ctr[i in ObsSet(K)],\n ɛ_dw[i] >= -sum(X[j, i] * β[j] for j in Candidates) + y[i]\n end\n )\n\n # create local objective function\n @objective(inner, Min, sum(ɛ_up[i] + ɛ_dw[i] for i in ObsSet(K)))\n\n # return the correct group of parameters\n return (inner, β)\nend","category":"page"},{"location":"Examples/benders/#Outer-Problem","page":"Benders Quantile Regression","title":"Outer Problem","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now that all pieces of the original problem can be representad by the convex z_k(x) functions we can recast the problem in the the equivalent form:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x c^T x + z_1(x) + dots + z_n(x) notag \n textsubject to Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"However we cannot pass a problem in this form to a linear programming solver (it could be passed to other kinds of solvers).","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Another standart result of optimization theory is that a convex function can be represented by its supporting hyper-planes:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n z_k(x) = min_z x z notag \n textsubject to z geq pi_k(hatx) (x - hatx) + z_k(hatx) forall hatx in dom(z_k) notag 
\nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Then we can re-write (again) the outer problem as","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x z_k c^T x + z_1 + dots + z_n notag \n textsubject to z_i geq pi_i(hatx) (x - hatx) + z_i(hatx) forall hatx in dom(z_i) i in 1 dots n notag \n Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Which is a linear program! However, it has infinitely many constraints !!","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"We can relax the infinite constraints and write:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_x z_k c^T x + z_1 + dots + z_n notag \n textsubject to Ax = b notag \n x geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"But now its only an underestimated problem. 
In the case of our problem it can be written as:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"beginalign\n min_varepsilon beta sum_i in Nodes varepsilon_i notag \n textsubject to varepsilon_i geq 0 notag \nendalign","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"This model can be written in JuMP:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_model()\n outer = Model(OPTIMIZER)\n @variables(outer, begin\n ɛ[Nodes] >= 0\n β[1:N_Candidates]\n end)\n @objective(outer, Min, sum(ɛ[i] for i in Nodes))\n sol = zeros(N_Candidates)\n return (outer, ɛ, β, sol)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The method to solve the outer problem and query its solution is given here:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_solve(outer_model)\n model = outer_model[1]\n β = outer_model[3]\n optimize!(model)\n return (value.(β), objective_value(model))\nend","category":"page"},{"location":"Examples/benders/#Supporting-Hyperplanes","page":"Benders Quantile Regression","title":"Supporting Hyperplanes","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"With these building blocks in hand, we can start building the algorithm. 
So far we know how to:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Solve the relaxed outer problem\nObtain the solution for the hatx (or beta in our case)","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now we can:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Fix the values of hatx in the inner problems\nSolve the inner problems\nquery the solution of the inner problems to obtain the supporting hyperplane","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"the value of z_k(hatx), which is the objective value of the inner problem","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"and the derivative pi_k(hatx) = fracd z_k(x)d x Big_x = hatx The derivative is the dual variable associated to the variable hatx, which results by applying the chain rule on the constraints duals. 
These new steps are executed by the function:","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function inner_solve(model, outer_solution)\n β0 = outer_solution[1]\n inner = model[1]\n\n # The first step is to fix the values given by the outer problem\n @timeit \"fix\" begin\n β = model[2]\n MOI.set.(inner, POI.ParameterValue(), β, β0)\n end\n\n # here the inner problem is solved\n @timeit \"opt\" optimize!(inner)\n\n # query dual variables, which are sensitivities\n # They represent the subgradient (almost a derivative)\n # of the objective function for infinitesimal variations\n # of the constants in the linear constraints\n # POI: we can query dual values of *parameters*\n π = MOI.get.(inner, POI.ParameterDual(), β)\n\n # π2 = shadow_price.(β_fix)\n obj = objective_value(inner)\n rhs = obj - dot(π, β0)\n return (rhs, π, obj)\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now that we have cutting plane in hand we can add them to the outer problem","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function outer_add_cut(outer_model, cut_info, node)\n outer = outer_model[1]\n ɛ = outer_model[2]\n β = outer_model[3]\n\n rhs = cut_info[1]\n π = cut_info[2]\n\n @constraint(outer, ɛ[node] >= sum(π[j] * β[j] for j in Candidates) + rhs)\nend","category":"page"},{"location":"Examples/benders/#Algorithm-wrap-up","page":"Benders Quantile Regression","title":"Algorithm wrap up","text":"","category":"section"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The complete algorithm is","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Solve the relaxed master 
problem\nObtain the solution for the hatx (or beta in our case)\nFix the values of hatx in the slave problems\nSolve the slave problem\nquery the solution of the slave problem to obtain the supporting hyperplane\nadd hyperplane to master problem\nrepeat","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Now we grab all the pieces that we built and we write the benders algorithm by calling the above function in a proper order.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"The macros @timeit are use to time each step of the algorithm.","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"function decomposed_model(;print_timer_outputs::Bool = true)\n reset_timer!() # reset timer fo comparision\n time_init = @elapsed @timeit \"Init\" begin\n # Create the outer problem with no cuts\n @timeit \"outer\" outer = outer_model()\n\n # initialize solution for the regression coefficients in zero\n @timeit \"Sol\" solution = (zeros(N_Candidates), Inf)\n best_sol = deepcopy(solution)\n\n # Create the inner problems\n @timeit \"inners\" inners =\n [inner_model(i) for i in Candidates]\n\n # Save initial version of the inner problems and create\n # the first set of cuts\n @timeit \"Cuts\" cuts =\n [inner_solve(inners[i], solution) for i in Candidates]\n end\n\n UB = +Inf\n LB = -Inf\n\n # println(\"Initialize Iterative step\")\n time_loop = @elapsed @timeit \"Loop\" for k in 1:80\n\n # Add cuts generated from each inner problem to the outer problem\n @timeit \"add cuts\" for i in Candidates\n outer_add_cut(outer, cuts[i], i)\n end\n\n # Solve the outer problem with the new set of cuts\n # Obtain new solution candidate for the regression coefficients\n @timeit \"solve outer\" solution = outer_solve( outer)\n\n # Pass the new candidate 
solution to each of the inner problems\n # Solve the inner problems and obtain cutting planes\n @timeit \"solve nodes\" for i in Candidates\n cuts[i] = inner_solve( inners[i], solution)\n end\n\n LB = solution[2]\n new_UB = sum(cuts[i][3] for i in Candidates)\n if new_UB <= UB\n best_sol = deepcopy(solution)\n end\n UB = min(UB, new_UB)\n\n if abs(UB - LB) / (abs(UB) + abs(LB)) < 0.05\n break\n end\n end\n\n print_timer_outputs && print_timer()\n\n return best_sol[1]\nend","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"Run benders decomposition with POI","category":"page"},{"location":"Examples/benders/","page":"Benders Quantile Regression","title":"Benders Quantile Regression","text":"β2 = decomposed_model(; print_timer_outputs = false);\nGC.gc()\nβ2 = decomposed_model();","category":"page"}] }