diff --git a/examples/HiGHS_example.jl b/examples/HiGHS_example.jl index e935364ae..043d77f3c 100644 --- a/examples/HiGHS_example.jl +++ b/examples/HiGHS_example.jl @@ -24,11 +24,11 @@ end lmo = FrankWolfe.MathOptLMO(o) function f(x) - return 0.5 * sum((x.-diffw).^2) + return 0.5 * sum((x .- diffw) .^ 2) end function grad!(storage, x) - @. storage = x-diffw + @. storage = x - diffw end -x, _, result = Boscia.solve(f, grad!, lmo, verbose = true) +x, _, result = Boscia.solve(f, grad!, lmo, verbose=true) diff --git a/examples/approx_planted_point.jl b/examples/approx_planted_point.jl index 9ec4a55d9..d027eab42 100644 --- a/examples/approx_planted_point.jl +++ b/examples/approx_planted_point.jl @@ -41,15 +41,15 @@ diffi = Random.rand(Bool, n) * 0.6 .+ 0.3 @testset "Using Cube LMO" begin int_vars = collect(1:n) - + bounds = Boscia.IntegerBounds() - for i in 1:n + for i in 1:n push!(bounds, (i, 0.0), :greaterthan) push!(bounds, (i, 1.0), :lessthan) end blmo = Boscia.CubeBLMO(n, int_vars, bounds) - x, _, result = Boscia.solve(f, grad!, blmo, verbose =true) + x, _, result = Boscia.solve(f, grad!, blmo, verbose=true) @test x == round.(diffi) @test isapprox(f(x), f(result[:raw_solution]), atol=1e-6, rtol=1e-3) @@ -59,14 +59,15 @@ diffi = Random.rand(Bool, n) * 0.6 .+ 0.3 int_vars = collect(1:n) lbs = zeros(n) ubs = ones(n) - + sblmo = Boscia.CubeSimpleBLMO(lbs, ubs, int_vars) - x, _, result = Boscia.solve(f, grad!, sblmo, lbs[int_vars], ubs[int_vars], int_vars, n, verbose =true) + x, _, result = + Boscia.solve(f, grad!, sblmo, lbs[int_vars], ubs[int_vars], int_vars, n, verbose=true) @test x == round.(diffi) @test isapprox(f(x), f(result[:raw_solution]), atol=1e-6, rtol=1e-3) - end + end end @@ -79,7 +80,7 @@ end @. storage = x - diffi end - int_vars = unique!(rand(collect(1:n), Int(floor(n/2)))) + int_vars = unique!(rand(collect(1:n), Int(floor(n / 2)))) @testset "Using SCIP" begin o = SCIP.Optimizer() @@ -99,37 +100,38 @@ end sol = diffi sol[int_vars] = round.(sol[int_vars]) - @test sum(isapprox.(x, sol, atol =1e-6, rtol=1e-2)) == n + @test sum(isapprox.(x, sol, atol=1e-6, rtol=1e-2)) == n @test isapprox(f(x), f(result[:raw_solution]), atol=1e-6, rtol=1e-3) end - @testset "Using Cube LMO" begin + @testset "Using Cube LMO" begin bounds = Boscia.IntegerBounds() - for i in 1:n + for i in 1:n push!(bounds, (i, 0.0), :greaterthan) push!(bounds, (i, 1.0), :lessthan) end blmo = Boscia.CubeBLMO(n, int_vars, bounds) - x, _, result = Boscia.solve(f, grad!, blmo, verbose =true) + x, _, result = Boscia.solve(f, grad!, blmo, verbose=true) sol = diffi sol[int_vars] = round.(sol[int_vars]) - @test sum(isapprox.(x, sol, atol =1e-6, rtol=1e-2)) == n + @test sum(isapprox.(x, sol, atol=1e-6, rtol=1e-2)) == n @test isapprox(f(x), f(result[:raw_solution]), atol=1e-6, rtol=1e-3) end @testset "Using Cube Simple LMO" begin lbs = zeros(n) ubs = ones(n) - + sblmo = Boscia.CubeSimpleBLMO(lbs, ubs, int_vars) - x, _, result = Boscia.solve(f, grad!, sblmo, lbs[int_vars], ubs[int_vars], int_vars, n, verbose =true) + x, _, result = + Boscia.solve(f, grad!, sblmo, lbs[int_vars], ubs[int_vars], int_vars, n, verbose=true) sol = diffi sol[int_vars] = round.(sol[int_vars]) - @test sum(isapprox.(x, sol, atol =1e-6, rtol=1e-2)) == n + @test sum(isapprox.(x, sol, atol=1e-6, rtol=1e-2)) == n @test isapprox(f(x), f(result[:raw_solution]), atol=1e-6, rtol=1e-3) end -end +end diff --git a/examples/mps-example.jl b/examples/mps-example.jl index 3097ee00d..6ea84f0ed 100644 --- a/examples/mps-example.jl +++ b/examples/mps-example.jl @@ -60,4 
+60,3 @@ end x, _, result = Boscia.solve(f, grad!, lmo, verbose=true) @test f(x) <= f(result[:raw_solution]) end - diff --git a/examples/mps-examples/mip-examples.jl b/examples/mps-examples/mip-examples.jl index df2033fb6..173dfd089 100644 --- a/examples/mps-examples/mip-examples.jl +++ b/examples/mps-examples/mip-examples.jl @@ -45,7 +45,7 @@ function build_example(example, num_v) MOI.set(o, MOI.RawOptimizerAttribute("presolving/maxrounds"), 0) #trick to push the optimum towards the interior - vs = [FrankWolfe.compute_extreme_point(lmo, randn(n)) for _ in 1:num_v] + vs = [FrankWolfe.compute_extreme_point(lmo, randn(n)) for _ in 1:num_v] # done to avoid one vertex being systematically selected unique!(vs) @@ -62,10 +62,10 @@ function build_example(example, num_v) end function grad!(storage, x) - mul!(storage, length(vs)/max_norm * I, x) + mul!(storage, length(vs) / max_norm * I, x) storage .+= b_mps for v in vs - @. storage -= 1/max_norm * v + @. storage -= 1 / max_norm * v end end @@ -94,7 +94,15 @@ test_instance = string("MPS ", example, " instance") @testset "$test_instance" begin println("Example $(example)") lmo, f, grad! = build_example(example, num_v) - x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, print_iter = 10, fw_epsilon = 1e-1, min_node_fw_epsilon = 1e-3, time_limit=600) + x, _, result = Boscia.solve( + f, + grad!, + lmo, + verbose=true, + print_iter=10, + fw_epsilon=1e-1, + min_node_fw_epsilon=1e-3, + time_limit=600, + ) @test f(x) <= f(result[:raw_solution]) end - diff --git a/ext/BosciaHiGHSExt.jl b/ext/BosciaHiGHSExt.jl index 513295183..dcd974d2e 100644 --- a/ext/BosciaHiGHSExt.jl +++ b/ext/BosciaHiGHSExt.jl @@ -7,5 +7,5 @@ const MOI = MathOptInterface const MOIU = MOI.Utilities import MathOptSetDistances as MOD - + end # module diff --git a/ext/BosciaSCIPExt.jl b/ext/BosciaSCIPExt.jl index ca0b59b90..83dd12cbd 100644 --- a/ext/BosciaSCIPExt.jl +++ b/ext/BosciaSCIPExt.jl @@ -12,7 +12,12 @@ import MathOptSetDistances as MOD Finds the best solution in the SCIP solution storage, based on the objective function `f`. Returns the solution vector and the corresponding best value. """ -function Boscia.find_best_solution(f::Function, o::SCIP.Optimizer, vars::Vector{MOI.VariableIndex}, domain_oracle) +function Boscia.find_best_solution( + f::Function, + o::SCIP.Optimizer, + vars::Vector{MOI.VariableIndex}, + domain_oracle, +) sols_vec = unsafe_wrap(Vector{Ptr{Cvoid}}, SCIP.LibSCIP.SCIPgetSols(o), SCIP.LibSCIP.SCIPgetNSols(o)) best_val = Inf @@ -35,7 +40,7 @@ end Cleanup internal SCIP model """ function Boscia.free_model(o::SCIP.Optimizer) - SCIP.SCIPfreeTransform(o) + return SCIP.SCIPfreeTransform(o) end """ @@ -44,5 +49,5 @@ Get solving tolerance. function Boscia.get_tol(o::SCIP.Optimizer) return MOI.get(o, MOI.RawOptimizerAttribute("numerics/feastol")) end - + end # module diff --git a/src/MOI_bounded_oracle.jl b/src/MOI_bounded_oracle.jl index 4826098f7..b94ed1687 100644 --- a/src/MOI_bounded_oracle.jl +++ b/src/MOI_bounded_oracle.jl @@ -23,7 +23,7 @@ Convert object of Type MathOptLMO into MathOptBLMO and viceversa. function Base.convert(::Type{MathOptBLMO}, lmo::FrankWolfe.MathOptLMO) return MathOptBLMO(lmo.o, lmo.use_modify) end -function Base.convert(::Type{FrankWolfe.MathOptLMO}, blmo::MathOptBLMO) +function Base.convert(::Type{FrankWolfe.MathOptLMO}, blmo::MathOptBLMO) return FrankWolfe.MathOptLMO(blmo.o, blmo.use_modify) end @@ -45,7 +45,7 @@ end Get list of variables indices and the total number of variables. 
If the problem has n variables, they are expected to contiguous and ordered from 1 to n. """ -function Boscia.get_list_of_variables(blmo::MathOptBLMO) +function Boscia.get_list_of_variables(blmo::MathOptBLMO) v_indices = MOI.get(blmo.o, MOI.ListOfVariableIndices()) n = length(v_indices) if v_indices != MOI.VariableIndex.(1:n) @@ -57,27 +57,30 @@ end """ Get list of binary and integer variables, respectively. """ -function get_binary_variables(blmo::MathOptBLMO) +function get_binary_variables(blmo::MathOptBLMO) return MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.ZeroOne}()) end -function Boscia.get_integer_variables(blmo::MathOptBLMO) +function Boscia.get_integer_variables(blmo::MathOptBLMO) bin_var = get_binary_variables(blmo) int_var = MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.Integer}()) return vcat(getproperty.(int_var, :value), getproperty.(bin_var, :value)) -end +end """ Get the index of the integer variable the bound is working on. """ -function Boscia.get_int_var(blmo::MathOptBLMO, c_idx) +function Boscia.get_int_var(blmo::MathOptBLMO, c_idx) return c_idx.value end """ Get the list of lower bounds. """ -function Boscia.get_lower_bound_list(blmo::MathOptBLMO) - return MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.GreaterThan{Float64}}()) +function Boscia.get_lower_bound_list(blmo::MathOptBLMO) + return MOI.get( + blmo.o, + MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.GreaterThan{Float64}}(), + ) end """ @@ -85,12 +88,12 @@ Get the list of upper bounds. """ function Boscia.get_upper_bound_list(blmo::MathOptBLMO) return MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.LessThan{Float64}}()) -end +end """ Change the value of the bound c_idx. """ -function Boscia.set_bound!(blmo::MathOptBLMO, c_idx, value, sense::Symbol) +function Boscia.set_bound!(blmo::MathOptBLMO, c_idx, value, sense::Symbol) if sense == :lessthan MOI.set(blmo.o, MOI.ConstraintSet(), c_idx, MOI.LessThan(value)) elseif sense == :greaterthan @@ -110,21 +113,21 @@ end """ Check if the subject of the bound c_idx is an integer variable (recorded in int_vars). """ -function Boscia.is_constraint_on_int_var(blmo::MathOptBLMO, c_idx, int_vars) +function Boscia.is_constraint_on_int_var(blmo::MathOptBLMO, c_idx, int_vars) return c_idx.value in int_vars end """ To check if there is bound for the variable in the global or node bounds. """ -function Boscia.is_bound_in(blmo::MathOptBLMO, c_idx, bounds) +function Boscia.is_bound_in(blmo::MathOptBLMO, c_idx, bounds) return haskey(bounds, c_idx.value) end """ Delete bounds. """ -function Boscia.delete_bounds!(blmo::MathOptBLMO, cons_delete) +function Boscia.delete_bounds!(blmo::MathOptBLMO, cons_delete) for (d_idx, _) in cons_delete MOI.delete(blmo.o, d_idx) end @@ -136,7 +139,7 @@ Add bound constraint. function Boscia.add_bound_constraint!(blmo::MathOptBLMO, key, value, sense::Symbol) if sense == :lessthan MOI.add_constraint(blmo.o, MOI.VariableIndex(key), MOI.LessThan(value)) - elseif sense == :greaterthan + elseif sense == :greaterthan MOI.add_constraint(blmo.o, MOI.VariableIndex(key), MOI.GreaterThan(value)) end end @@ -144,11 +147,8 @@ end """ Has variable a binary constraint? 
""" -function has_binary_constraint(blmo::MathOptBLMO, idx::Int) - consB_list = MOI.get( - blmo.o, - MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.ZeroOne}(), - ) +function has_binary_constraint(blmo::MathOptBLMO, idx::Int) + consB_list = MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.ZeroOne}()) for c_idx in consB_list if c_idx.value == idx return true, c_idx @@ -160,11 +160,8 @@ end """ Does the variable have an integer constraint? """ -function Boscia.has_integer_constraint(blmo::MathOptBLMO, idx::Int) - consB_list = MOI.get( - blmo.o, - MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.Integer}(), - ) +function Boscia.has_integer_constraint(blmo::MathOptBLMO, idx::Int) + consB_list = MOI.get(blmo.o, MOI.ListOfConstraintIndices{MOI.VariableIndex,MOI.Integer}()) for c_idx in consB_list if c_idx.value == idx return true, c_idx @@ -200,7 +197,7 @@ function is_linear_feasible_subroutine(o::MOI.ModelLike, ::Type{F}, ::Type{S}, v func = MOI.get(o, MOI.ConstraintFunction(), c_idx) val = MOIU.eval_variables(valvar, func) set = MOI.get(o, MOI.ConstraintSet(), c_idx) - # @debug("Constraint: $(F)-$(S) $(func) = $(val) in $(set)") + # @debug("Constraint: $(F)-$(S) $(func) = $(val) in $(set)") dist = MOD.distance_to_set(MOD.DefaultDistance(), val, set) solve_tol = get_tol(o) if dist > 5000.0 * solve_tol @@ -230,7 +227,7 @@ function explicit_bounds_binary_var(blmo::MathOptBLMO, global_bounds::Boscia.Int end global_bounds[idx.value, :greaterthan] = 0.0 global_bounds[idx.value, :lessthan] = 1.0 - end + end end """ @@ -262,7 +259,7 @@ function Boscia.build_global_bounds(blmo::MathOptBLMO, integer_variables) push!(global_bounds, (idx, s.upper), :lessthan) end @assert !MOI.is_valid(blmo.o, cidx) - end + end explicit_bounds_binary_var(blmo, global_bounds) return global_bounds end @@ -277,7 +274,7 @@ Safety check only. function Boscia.build_LMO_correct(blmo, node_bounds) for list in (node_bounds.lower_bounds, node_bounds.upper_bounds) for (idx, set) in list - c_idx = MOI.ConstraintIndex{MOI.VariableIndex, typeof(set)}(idx) + c_idx = MOI.ConstraintIndex{MOI.VariableIndex,typeof(set)}(idx) @assert MOI.is_valid(blmo.o, c_idx) set2 = MOI.get(blmo.o, MOI.ConstraintSet(), c_idx) if !(set == set2) @@ -294,11 +291,11 @@ end Free model data from previous solve (if necessary). """ function Boscia.free_model(blmo) - free_model(blmo.o) + return free_model(blmo.o) end # no-op by default -function free_model(o::MOI.AbstractOptimizer) +function free_model(o::MOI.AbstractOptimizer) return true end @@ -326,11 +323,9 @@ function Boscia.is_valid_split(tree::Bonobo.BnBTree, blmo::MathOptBLMO, vidx::In l_idx = MOI.ConstraintIndex{MOI.VariableIndex,MOI.GreaterThan{Float64}}(vidx) u_idx = MOI.ConstraintIndex{MOI.VariableIndex,MOI.LessThan{Float64}}(vidx) l_bound = - MOI.is_valid(blmo.o, l_idx) ? - MOI.get(blmo.o, MOI.ConstraintSet(), l_idx) : nothing + MOI.is_valid(blmo.o, l_idx) ? MOI.get(blmo.o, MOI.ConstraintSet(), l_idx) : nothing u_bound = - MOI.is_valid(blmo.o, u_idx) ? - MOI.get(blmo.o, MOI.ConstraintSet(), u_idx) : nothing + MOI.is_valid(blmo.o, u_idx) ? MOI.get(blmo.o, MOI.ConstraintSet(), u_idx) : nothing if (l_bound !== nothing && u_bound !== nothing && l_bound.lower === u_bound.upper) @debug l_bound.lower, u_bound.upper return false @@ -356,10 +351,10 @@ end """ Is a given point v indicator feasible, i.e. meets the indicator constraints? If applicable. 
""" -function Boscia.is_indicator_feasible(blmo::MathOptBLMO, v; atol= 1e-6, rtol=1e-6) +function Boscia.is_indicator_feasible(blmo::MathOptBLMO, v; atol=1e-6, rtol=1e-6) return is_indicator_feasible(blmo.o, v; atol, rtol) end -function is_indicator_feasible(o, x; atol = 1e-6, rtol=1e-6) +function is_indicator_feasible(o, x; atol=1e-6, rtol=1e-6) valvar(f) = x[f.value] for (F, S) in MOI.get(o, MOI.ListOfConstraintTypesPresent()) if S <: MOI.Indicator @@ -368,7 +363,7 @@ function is_indicator_feasible(o, x; atol = 1e-6, rtol=1e-6) func = MOI.get(o, MOI.ConstraintFunction(), c_idx) val = MOIU.eval_variables(valvar, func) set = MOI.get(o, MOI.ConstraintSet(), c_idx) - # @debug("Constraint: $(F)-$(S) $(func) = $(val) in $(set)") + # @debug("Constraint: $(F)-$(S) $(func) = $(val) in $(set)") dist = MOD.distance_to_set(MOD.DefaultDistance(), val, set) if dist > atol @debug("Constraint: $(F)-$(S) $(func) = $(val) in $(set)") @@ -407,14 +402,19 @@ end Find best solution from the solving process. """ function Boscia.find_best_solution(f::Function, blmo::MathOptBLMO, vars, domain_oracle) - return find_best_solution(f, blmo.o, vars, domain_oracle) + return find_best_solution(f, blmo.o, vars, domain_oracle) end """ Finds the best solution in the Optimizer's solution storage, based on the objective function `f`. Returns the solution vector and the corresponding best value. """ -function find_best_solution(f::Function, o::MOI.AbstractOptimizer, vars::Vector{MOI.VariableIndex}, domain_oracle) +function find_best_solution( + f::Function, + o::MOI.AbstractOptimizer, + vars::Vector{MOI.VariableIndex}, + domain_oracle, +) nsols = MOI.get(o, MOI.ResultCount()) @assert nsols > 0 best_val = Inf @@ -449,7 +449,7 @@ function Boscia.check_infeasible_vertex(blmo::MathOptBLMO, tree) node_bounds = node.local_bounds for list in (node_bounds.lower_bounds, node_bounds.upper_bounds) for (idx, set) in list - c_idx = MOI.ConstraintIndex{MOI.VariableIndex, typeof(set)}(idx) + c_idx = MOI.ConstraintIndex{MOI.VariableIndex,typeof(set)}(idx) @assert MOI.is_valid(state.tlmo.blmo.o, c_idx) set2 = MOI.get(state.tlmo.blmo.o, MOI.ConstraintSet(), c_idx) if !(set == set2) @@ -468,7 +468,7 @@ function Bonobo.get_branching_variable( tree::Bonobo.BnBTree, branching::Boscia.PartialStrongBranching{MathOptBLMO{OT}}, node::Bonobo.AbstractNode, -) where OT <: MOI.AbstractOptimizer +) where {OT<:MOI.AbstractOptimizer} xrel = Bonobo.get_relaxed_values(tree, node) max_lowerbound = -Inf max_idx = -1 @@ -611,7 +611,7 @@ function Boscia.solve( branching_strategy=Bonobo.MOST_INFEASIBLE(), variant::Boscia.FrankWolfeVariant=Boscia.BPCG(), line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), - active_set::Union{Nothing, FrankWolfe.ActiveSet} = nothing, + active_set::Union{Nothing,FrankWolfe.ActiveSet}=nothing, fw_epsilon=1e-2, verbose=false, dual_gap=1e-6, @@ -629,38 +629,41 @@ function Boscia.solve( global_dual_tightening=true, bnb_callback=nothing, strong_convexity=0.0, - domain_oracle= x->true, + domain_oracle=x -> true, start_solution=nothing, - fw_verbose = false, - kwargs... 
+ fw_verbose=false, + kwargs..., ) blmo = convert(MathOptBLMO, lmo) - return Boscia.solve(f, g, blmo; - traverse_strategy=traverse_strategy, - branching_strategy=branching_strategy, - variant=variant, - line_search=line_search, - active_set=active_set, - fw_epsilon=fw_epsilon, - verbose=verbose, - dual_gap=dual_gap, - rel_dual_gap=rel_dual_gap, - time_limit=time_limit, - print_iter=print_iter, - dual_gap_decay_factor=dual_gap_decay_factor, - max_fw_iter=max_fw_iter, - min_number_lower=min_number_lower, - min_node_fw_epsilon=min_node_fw_epsilon, - use_postsolve=use_postsolve, - min_fw_iterations=min_fw_iterations, - max_iteration_post=max_iteration_post, - dual_tightening=dual_tightening, - global_dual_tightening=global_dual_tightening, - bnb_callback=bnb_callback, - strong_convexity=strong_convexity, - domain_oracle=domain_oracle, - start_solution=start_solution, - fw_verbose=fw_verbose, - kwargs... + return Boscia.solve( + f, + g, + blmo; + traverse_strategy=traverse_strategy, + branching_strategy=branching_strategy, + variant=variant, + line_search=line_search, + active_set=active_set, + fw_epsilon=fw_epsilon, + verbose=verbose, + dual_gap=dual_gap, + rel_dual_gap=rel_dual_gap, + time_limit=time_limit, + print_iter=print_iter, + dual_gap_decay_factor=dual_gap_decay_factor, + max_fw_iter=max_fw_iter, + min_number_lower=min_number_lower, + min_node_fw_epsilon=min_node_fw_epsilon, + use_postsolve=use_postsolve, + min_fw_iterations=min_fw_iterations, + max_iteration_post=max_iteration_post, + dual_tightening=dual_tightening, + global_dual_tightening=global_dual_tightening, + bnb_callback=bnb_callback, + strong_convexity=strong_convexity, + domain_oracle=domain_oracle, + start_solution=start_solution, + fw_verbose=fw_verbose, + kwargs..., ) -end \ No newline at end of file +end diff --git a/src/blmo_interface.jl b/src/blmo_interface.jl index 7cb618e88..be6c4a6cd 100644 --- a/src/blmo_interface.jl +++ b/src/blmo_interface.jl @@ -3,7 +3,7 @@ Supertype for the Bounded Linear Minimization Oracles """ -abstract type BoundedLinearMinimizationOracle <: FrankWolfe.LinearMinimizationOracle end +abstract type BoundedLinearMinimizationOracle <: FrankWolfe.LinearMinimizationOracle end """ Enum encoding the status of the Bounded Linear Minimization Oracle. @@ -41,7 +41,7 @@ function get_list_of_variables end """ Get list of integer variables. """ -function get_integer_variables end +function get_integer_variables end """ Get the index of the integer variable the bound is working on. @@ -56,7 +56,7 @@ function get_lower_bound_list end """ Get the list of upper bounds. """ -function get_upper_bound_list end +function get_upper_bound_list end """ Read bound value for c_idx. @@ -136,7 +136,7 @@ end """ Is a given point v indicator feasible, i.e. meets the indicator constraints? If applicable. """ -function is_indicator_feasible(blmo::BoundedLinearMinimizationOracle, v; atol= 1e-6, rtol=1e-6) +function is_indicator_feasible(blmo::BoundedLinearMinimizationOracle, v; atol=1e-6, rtol=1e-6) return true end @@ -150,8 +150,7 @@ end """ Deal with infeasible vertex if necessary, e.g. check what caused it etc. 
""" -function check_infeasible_vertex(blmo::BoundedLinearMinimizationOracle, tree) -end +function check_infeasible_vertex(blmo::BoundedLinearMinimizationOracle, tree) end ## Utility diff --git a/src/build_lmo.jl b/src/build_lmo.jl index a2e5b8508..426b399a5 100644 --- a/src/build_lmo.jl +++ b/src/build_lmo.jl @@ -28,13 +28,13 @@ function build_LMO( # Change if is_bound_in(blmo, c_idx, node_bounds.lower_bounds) set_bound!(blmo, c_idx, node_bounds.lower_bounds[v_idx], :greaterthan) - # Keep + # Keep else set_bound!(blmo, c_idx, global_bounds.lower_bounds[v_idx], :greaterthan) end else # Delete - push!(cons_delete, (c_idx, :greaterthan)) + push!(cons_delete, (c_idx, :greaterthan)) end end end @@ -47,13 +47,13 @@ function build_LMO( # Change if is_bound_in(blmo, c_idx, node_bounds.upper_bounds) set_bound!(blmo, c_idx, node_bounds.upper_bounds[v_idx], :lessthan) - # Keep + # Keep else set_bound!(blmo, c_idx, global_bounds.upper_bounds[v_idx], :lessthan) end else # Delete - push!(cons_delete, (c_idx, :lessthan)) + push!(cons_delete, (c_idx, :lessthan)) end end end @@ -74,7 +74,7 @@ function build_LMO( end end - build_LMO_correct(blmo, node_bounds) + return build_LMO_correct(blmo, node_bounds) end build_LMO(tlmo::TimeTrackingLMO, gb::IntegerBounds, nb::IntegerBounds, int_vars::Vector{Int64}) = diff --git a/src/callbacks.jl b/src/callbacks.jl index 2c582b4b8..a7e70a473 100644 --- a/src/callbacks.jl +++ b/src/callbacks.jl @@ -1,5 +1,11 @@ # FW callback -function build_FW_callback(tree, min_number_lower, check_rounding_value::Bool, fw_iterations, min_fw_iterations) +function build_FW_callback( + tree, + min_number_lower, + check_rounding_value::Bool, + fw_iterations, + min_fw_iterations, +) vars = get_variables_pointers(tree.root.problem.tlmo.blmo, tree) # variable to only fetch heuristics when the counter increases ncalls = -1 @@ -17,15 +23,19 @@ function build_FW_callback(tree, min_number_lower, check_rounding_value::Bool, f if state.lmo !== nothing # can happen with using Blended Conditional Gradient if ncalls != state.lmo.ncalls ncalls = state.lmo.ncalls - (best_v, best_val) = - find_best_solution(tree.root.problem.f, tree.root.problem.tlmo.blmo, vars, tree.root.options[:domain_oracle]) + (best_v, best_val) = find_best_solution( + tree.root.problem.f, + tree.root.problem.tlmo.blmo, + vars, + tree.root.options[:domain_oracle], + ) if best_val < tree.incumbent tree.root.updated_incumbent[] = true node = tree.nodes[tree.root.current_node_id[]] sol = FrankWolfeSolution(best_val, best_v, node, :Solver) push!(tree.solutions, sol) if tree.incumbent_solution === nothing || - sol.objective < tree.incumbent_solution.objective + sol.objective < tree.incumbent_solution.objective tree.incumbent_solution = sol end tree.incumbent = best_val @@ -34,7 +44,9 @@ function build_FW_callback(tree, min_number_lower, check_rounding_value::Bool, f end end - if (state.primal - state.dual_gap > tree.incumbent + 1e-2) && tree.num_nodes != 1 && state.t > min_fw_iterations + if (state.primal - state.dual_gap > tree.incumbent + 1e-2) && + tree.num_nodes != 1 && + state.t > min_fw_iterations return false end @@ -47,7 +59,7 @@ function build_FW_callback(tree, min_number_lower, check_rounding_value::Bool, f sol = FrankWolfeSolution(val, copy(state.v), node, :vertex) push!(tree.solutions, sol) if tree.incumbent_solution === nothing || - sol.objective < tree.incumbent_solution.objective + sol.objective < tree.incumbent_solution.objective tree.incumbent_solution = sol end tree.incumbent = val @@ -77,7 +89,8 @@ function 
build_FW_callback(tree, min_number_lower, check_rounding_value::Bool, f x_rounded[idx] = round(state.x[idx]) end # check linear feasibility - if is_linear_feasible(tree.root.problem.tlmo, x_rounded) && is_integer_feasible(tree, x_rounded) + if is_linear_feasible(tree.root.problem.tlmo, x_rounded) && + is_integer_feasible(tree, x_rounded) # evaluate f(rounded) val = tree.root.problem.f(x_rounded) if val < tree.incumbent diff --git a/src/cube_blmo.jl b/src/cube_blmo.jl index bb8e2c7ec..9ac7f8fec 100644 --- a/src/cube_blmo.jl +++ b/src/cube_blmo.jl @@ -43,7 +43,7 @@ end ## Read information from problem function get_list_of_variables(blmo::CubeBLMO) return blmo.n, collect(1:blmo.n) - end +end # Get list of integer variables, respectively. function get_integer_variables(blmo::CubeBLMO) @@ -95,7 +95,7 @@ end function add_bound_constraint!(blmo::CubeBLMO, key, value, sense::Symbol) # Should not be necessary - error("Trying to add bound constraints of the cube!") + return error("Trying to add bound constraints of the cube!") end ## Checks @@ -110,8 +110,13 @@ end function is_linear_feasible(blmo::CubeBLMO, v::AbstractVector) for i in eachindex(v) - if !(blmo.bounds[i, :greaterthan] ≤ v[i] + 1e-6 || !(v[i] - 1e-6 ≤ blmo.bounds[i, :lessthan])) - @debug("Vertex entry: $(v[i]) Lower bound: $(blmo.bounds[i, :greaterthan]) Upper bound: $(blmo.bounds[i, :lessthan]))") + if !( + blmo.bounds[i, :greaterthan] ≤ v[i] + 1e-6 || + !(v[i] - 1e-6 ≤ blmo.bounds[i, :lessthan]) + ) + @debug( + "Vertex entry: $(v[i]) Lower bound: $(blmo.bounds[i, :greaterthan]) Upper bound: $(blmo.bounds[i, :lessthan]))" + ) return false end end @@ -129,12 +134,14 @@ end function build_LMO_correct(blmo::CubeBLMO, node_bounds) for key in keys(node_bounds.lower_bounds) - if !haskey(blmo.bounds, (key, :greaterthan)) || blmo.bounds[key, :greaterthan] != node_bounds[key, :greaterthan] + if !haskey(blmo.bounds, (key, :greaterthan)) || + blmo.bounds[key, :greaterthan] != node_bounds[key, :greaterthan] return false end end for key in keys(node_bounds.upper_bounds) - if !haskey(blmo.bounds, (key, :lessthan)) || blmo.bounds[key, :lessthan] != node_bounds[key, :lessthan] + if !haskey(blmo.bounds, (key, :lessthan)) || + blmo.bounds[key, :lessthan] != node_bounds[key, :lessthan] return false end end @@ -179,7 +186,7 @@ function bounded_compute_extreme_point(sblmo::CubeSimpleBLMO, d, lb, ub, int_var for i in eachindex(d) if i in int_vars idx = findfirst(x -> x == i, int_vars) - v[i] = d[i] > 0 ? lb[idx] : ub[idx] + v[i] = d[i] > 0 ? lb[idx] : ub[idx] else v[i] = d[i] > 0 ? 
sblmo.lower_bounds[i] : sblmo.upper_bounds[i] end @@ -190,7 +197,9 @@ end function is_linear_feasible(sblmo::CubeSimpleBLMO, v) for i in setdiff(eachindex(v), sblmo.int_vars) if !(sblmo.lower_bounds[i] ≤ v[i] + 1e-6 || !(v[i] - 1e-6 ≤ blmo.upper_bounds[i])) - @debug("Vertex entry: $(v[i]) Lower bound: $(blmo.bounds[i, :greaterthan]) Upper bound: $(blmo.bounds[i, :lessthan]))") + @debug( + "Vertex entry: $(v[i]) Lower bound: $(blmo.bounds[i, :greaterthan]) Upper bound: $(blmo.bounds[i, :lessthan]))" + ) return false end end diff --git a/src/frank_wolfe_variants.jl b/src/frank_wolfe_variants.jl index 2dbb50ae7..cd3e3f21c 100644 --- a/src/frank_wolfe_variants.jl +++ b/src/frank_wolfe_variants.jl @@ -38,21 +38,21 @@ struct AwayFrankWolfe <: FrankWolfeVariant end function solve_frank_wolfe( frank_wolfe_variant::AwayFrankWolfe, f, - grad!, + grad!, lmo, - active_set; + active_set; line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), epsilon=1e-7, max_iteration=10000, add_dropped_vertices=false, use_extra_vertex_storage=false, - extra_vertex_storage=nothing, + extra_vertex_storage=nothing, callback=nothing, - lazy=false, + lazy=false, lazy_tolerance=2.0, timeout=Inf, verbose=false, - workspace=nothing + workspace=nothing, ) x, _, primal, dual_gap, _, active_set = FrankWolfe.away_frank_wolfe( f, @@ -63,8 +63,8 @@ function solve_frank_wolfe( max_iteration=max_iteration, line_search=line_search, callback=callback, - lazy=lazy, - lazy_tolerance=lazy_tolerance, + lazy=lazy, + lazy_tolerance=lazy_tolerance, timeout=timeout, add_dropped_vertices=add_dropped_vertices, use_extra_vertex_storage=use_extra_vertex_storage, @@ -86,7 +86,7 @@ struct Blended <: FrankWolfeVariant end function solve_frank_wolfe( frank_wolfe_variant::Blended, f, - grad!, + grad!, lmo, active_set; line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), @@ -94,15 +94,15 @@ function solve_frank_wolfe( max_iteration=10000, add_dropped_vertices=false, use_extra_vertex_storage=false, - extra_vertex_storage=nothing, + extra_vertex_storage=nothing, callback=nothing, - lazy=false, + lazy=false, lazy_tolerance=2.0, timeout=Inf, verbose=false, - workspace=nothing + workspace=nothing, ) - x,_, primal, dual_gap,_, active_set = blended_conditional_gradient( + x, _, primal, dual_gap, _, active_set = blended_conditional_gradient( f, grad!, lmo, @@ -126,13 +126,13 @@ Base.print(io::IO, ::Blended) = print(io, "Blended Conditional Gradient") """ Blended Pairwise Conditional Gradient -""" +""" struct BPCG <: FrankWolfeVariant end function solve_frank_wolfe( frank_wolfe_variant::BPCG, f, - grad!, + grad!, lmo, active_set; line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), @@ -140,13 +140,13 @@ function solve_frank_wolfe( max_iteration=10000, add_dropped_vertices=false, use_extra_vertex_storage=false, - extra_vertex_storage=nothing, + extra_vertex_storage=nothing, callback=nothing, - lazy=false, + lazy=false, lazy_tolerance=2.0, timeout=Inf, verbose=false, - workspace=nothing + workspace=nothing, ) x, _, primal, dual_gap, _, active_set = FrankWolfe.blended_pairwise_conditional_gradient( f, @@ -163,7 +163,7 @@ function solve_frank_wolfe( lazy=lazy, lazy_tolerance=lazy_tolerance, timeout=timeout, - verbose=verbose + verbose=verbose, ) return x, primal, dual_gap, active_set @@ -183,7 +183,7 @@ struct VanillaFrankWolfe <: FrankWolfeVariant end function solve_frank_wolfe( frank_wolfe_variant::VanillaFrankWolfe, f, - grad!, + grad!, lmo, active_set; line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), @@ -191,13 
+191,13 @@ function solve_frank_wolfe( max_iteration=10000, add_dropped_vertices=false, use_extra_vertex_storage=false, - extra_vertex_storage=nothing, + extra_vertex_storage=nothing, callback=nothing, - lazy=false, + lazy=false, lazy_tolerance=2.0, timeout=Inf, verbose=false, - workspace=nothing + workspace=nothing, ) # If the flag away_steps is set to false, away_frank_wolfe performs Vanilla. # Observe that the lazy flag is only observed if away_steps is set to true, so it can neglected. @@ -206,7 +206,7 @@ function solve_frank_wolfe( grad!, lmo, active_set, - away_steps=false, + away_steps=false, epsilon=epsilon, max_iteration=max_iteration, line_search=line_search, diff --git a/src/integer_bounds.jl b/src/integer_bounds.jl index 0b63c832d..5cc976e4f 100644 --- a/src/integer_bounds.jl +++ b/src/integer_bounds.jl @@ -8,12 +8,11 @@ Keeps track of the bounds of the integer (binary) variables. `upper_bounds` dictionary of Float64, index is the key. """ mutable struct IntegerBounds - lower_bounds::Dict{Int, Float64} - upper_bounds::Dict{Int, Float64} + lower_bounds::Dict{Int,Float64} + upper_bounds::Dict{Int,Float64} end -IntegerBounds() = - IntegerBounds(Dict{Int, Float64}(), Dict{Int, Float64}()) +IntegerBounds() = IntegerBounds(Dict{Int,Float64}(), Dict{Int,Float64}()) function Base.push!(ib::IntegerBounds, (idx, bound), sense::Symbol) if sense == :greaterthan diff --git a/src/interface.jl b/src/interface.jl index a991595dd..f438a5025 100644 --- a/src/interface.jl +++ b/src/interface.jl @@ -55,9 +55,9 @@ function solve( branching_strategy=Bonobo.MOST_INFEASIBLE(), variant::FrankWolfeVariant=BPCG(), line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), - active_set::Union{Nothing, FrankWolfe.ActiveSet} = nothing, + active_set::Union{Nothing,FrankWolfe.ActiveSet}=nothing, lazy=true, - lazy_tolerance = 2.0, + lazy_tolerance=2.0, fw_epsilon=1e-2, verbose=false, dual_gap=1e-6, @@ -75,9 +75,9 @@ function solve( global_dual_tightening=true, bnb_callback=nothing, strong_convexity=0.0, - domain_oracle= x->true, + domain_oracle=x -> true, start_solution=nothing, - fw_verbose = false, + fw_verbose=false, kwargs..., ) if verbose @@ -160,15 +160,15 @@ function solve( current_node_id=Ref{Int}(0), updated_incumbent=Ref{Bool}(false), global_tightening_rhs=Ref(-Inf), - global_tightening_root_info = ( - lower_bounds = Dict{Int, Tuple{Float64, Float64}}(), - upper_bounds = Dict{Int, Tuple{Float64, Float64}}(), + global_tightening_root_info=( + lower_bounds=Dict{Int,Tuple{Float64,Float64}}(), + upper_bounds=Dict{Int,Tuple{Float64,Float64}}(), ), - global_tightenings = IntegerBounds(), + global_tightenings=IntegerBounds(), options=Dict{Symbol,Any}( :domain_oracle => domain_oracle, :dual_gap => dual_gap, - :dual_gap_decay_factor => dual_gap_decay_factor, + :dual_gap_decay_factor => dual_gap_decay_factor, :dual_tightening => dual_tightening, :fwVerbose => fw_verbose, :global_dual_tightening => global_dual_tightening, @@ -199,17 +199,20 @@ function solve( fw_time=Millisecond(0), global_tightenings=0, local_tightenings=0, - local_potential_tightenings=0, + local_potential_tightenings=0, dual_gap=-Inf, ), ) if start_solution !== nothing if size(start_solution) != size(v) - error("size of starting solution differs from vertices: $(size(start_solution)), $(size(v))") + error( + "size of starting solution differs from vertices: $(size(start_solution)), $(size(v))", + ) end # Sanity check that the provided solution is in fact feasible. 
- @assert is_linear_feasible(blmo, start_solution) && is_integer_feasible(tree, start_solution) + @assert is_linear_feasible(blmo, start_solution) && + is_integer_feasible(tree, start_solution) node = tree.nodes[1] sol = FrankWolfeSolution(f(start_solution), start_solution, node, :start) push!(tree.solutions, sol) @@ -260,7 +263,7 @@ function solve( local_tightenings, local_potential_tightenings, num_bin, - num_int, + num_int, ) fw_callback = build_FW_callback(tree, min_number_lower, true, fw_iterations, min_fw_iterations) @@ -275,10 +278,11 @@ function solve( # Check solution and polish x_polished = x if x !== nothing - if !is_linear_feasible(tree.root.problem.tlmo, x) + if !is_linear_feasible(tree.root.problem.tlmo, x) error("Reported solution not linear feasbile!") end - if !is_integer_feasible(tree.root.problem.integer_variables, x, atol=1e-16, rtol=1e-16) && x !== nothing + if !is_integer_feasible(tree.root.problem.integer_variables, x, atol=1e-16, rtol=1e-16) && + x !== nothing @info "Polish solution" for i in tree.root.problem.integer_variables x_polished[i] = round(x_polished[i]) @@ -368,7 +372,13 @@ function build_bnb_callback( lb_update=false, ) if baseline_callback !== nothing - baseline_callback(tree, node, worse_than_incumbent=worse_than_incumbent, node_infeasible=node_infeasible, lb_update=lb_update) + baseline_callback( + tree, + node, + worse_than_incumbent=worse_than_incumbent, + node_infeasible=node_infeasible, + lb_update=lb_update, + ) end if !node_infeasible #update lower bound @@ -498,10 +508,11 @@ function build_bnb_callback( # x can be nothing if the user supplied a custom domain oracle and the time limit is reached if x === nothing @assert tree.root.problem.solving_stage == TIME_LIMIT_REACHED - end + end primal_value = x !== nothing ? tree.root.problem.f(x) : Inf # deactivate postsolve if there is no solution - tree.root.options[:usePostsolve] = x === nothing ? false : tree.root.options[:usePostsolve] + tree.root.options[:usePostsolve] = + x === nothing ? false : tree.root.options[:usePostsolve] # TODO: here we need to calculate the actual state @@ -593,15 +604,17 @@ function postsolve(tree, result, time_ref, verbose, max_iteration_post) if primal < tree.incumbent tree.root.updated_incumbent[] = true tree.incumbent = primal - tree.lb = tree.root.problem.solving_stage == OPT_TREE_EMPTY ? primal - dual_gap : tree.lb + tree.lb = + tree.root.problem.solving_stage == OPT_TREE_EMPTY ? 
primal - dual_gap : tree.lb tree.incumbent_solution.objective = tree.solutions[1].objective = primal tree.incumbent_solution.solution = tree.solutions[1].solution = x - else + else if primal < tree.incumbent && tree.lb > primal - dual_gap @info "tree.lb > primal - dual_gap" - else + else @info "primal >= tree.incumbent" - @assert primal <= tree.incumbent + 1e-3 || isapprox(primal, tree.incumbent, atol =1e-6, rtol=1e-2) + @assert primal <= tree.incumbent + 1e-3 || + isapprox(primal, tree.incumbent, atol=1e-6, rtol=1e-2) end @info "postsolve did not improve the solution" primal = tree.incumbent_solution.objective = tree.solutions[1].objective @@ -636,12 +649,27 @@ function postsolve(tree, result, time_ref, verbose, max_iteration_post) println("\t LMO calls / node: $(tree.root.problem.tlmo.ncalls / tree.num_nodes)\n") if tree.root.options[:global_dual_tightening] println("\t Total number of global tightenings: ", sum(result[:global_tightenings])) - println("\t Global tightenings / node: ", round(sum(result[:global_tightenings])/length(result[:global_tightenings]), digits=2)) + println( + "\t Global tightenings / node: ", + round( + sum(result[:global_tightenings]) / length(result[:global_tightenings]), + digits=2, + ), + ) end if tree.root.options[:dual_tightening] println("\t Total number of local tightenings: ", sum(result[:local_tightenings])) - println("\t Local tightenings / node: ", round(sum(result[:local_tightenings])/length(result[:local_tightenings]), digits=2)) - println("\t Total number of potential local tightenings: ", sum(result[:local_potential_tightenings])) + println( + "\t Local tightenings / node: ", + round( + sum(result[:local_tightenings]) / length(result[:local_tightenings]), + digits=2, + ), + ) + println( + "\t Total number of potential local tightenings: ", + sum(result[:local_potential_tightenings]), + ) end end @@ -656,4 +684,3 @@ function postsolve(tree, result, time_ref, verbose, max_iteration_post) return x end - diff --git a/src/managed_blmo.jl b/src/managed_blmo.jl index 909fcca0d..860284985 100644 --- a/src/managed_blmo.jl +++ b/src/managed_blmo.jl @@ -30,9 +30,7 @@ n - Total number of variables. int_vars - List of indices of the integer variables. solving_time - The time evaluate `compute_extreme_point`. """ -mutable struct ManagedBoundedLMO{ - SBLMO<:SimpleBoundableLMO -} <: BoundedLinearMinimizationOracle +mutable struct ManagedBoundedLMO{SBLMO<:SimpleBoundableLMO} <: BoundedLinearMinimizationOracle simple_lmo::SBLMO lower_bounds::Vector{Float64} upper_bounds::Vector{Float64} @@ -42,11 +40,13 @@ mutable struct ManagedBoundedLMO{ end function ManagedBoundedLMO(simple_lmo, lb, ub, n, int_vars) - if length(lb) != length(ub) || length(ub) != length(int_vars) ||length(lb) != length(int_vars) - error("Supply lower and upper bounds for all integer variables. If there are no explicit bounds, set entry to Inf and -Inf, respectively. The entries have to match the entries of int_vars!") + if length(lb) != length(ub) || length(ub) != length(int_vars) || length(lb) != length(int_vars) + error( + "Supply lower and upper bounds for all integer variables. If there are no explicit bounds, set entry to Inf and -Inf, respectively. 
The entries have to match the entries of int_vars!", + ) end # Check that we have integer bounds - for (i,_) in enumerate(int_vars) + for (i, _) in enumerate(int_vars) @assert isapprox(lb[i], round(lb[i]), atol=1e-6, rtol=1e-2) @assert isapprox(ub[i], round(ub[i]), atol=1e-6, rtol=1e-2) end @@ -58,11 +58,17 @@ end # Overload FrankWolfe.compute_extreme_point function compute_extreme_point(blmo::ManagedBoundedLMO, d; kwargs...) time_ref = Dates.now() - v = bounded_compute_extreme_point(blmo.simple_lmo, d, blmo.lower_bounds, blmo.upper_bounds, blmo.int_vars) + v = bounded_compute_extreme_point( + blmo.simple_lmo, + d, + blmo.lower_bounds, + blmo.upper_bounds, + blmo.int_vars, + ) blmo.solving_time = float(Dates.value(Dates.now() - time_ref)) return v end - + # Read global bounds from the problem. function build_global_bounds(blmo::ManagedBoundedLMO, integer_variables) global_bounds = IntegerBounds() @@ -164,8 +170,12 @@ end # That means does v satisfy all bounds and other linear constraints? function is_linear_feasible(blmo::ManagedBoundedLMO, v::AbstractVector) for (i, int_var) in enumerate(blmo.int_vars) - if !(blmo.lower_bounds[i] ≤ v[int_var] + 1e-6 || !(v[int_var] - 1e-6 ≤ blmo.upper_bounds[i])) - @debug("Vertex entry: $(v[int_var]) Lower bound: $(blmo.lower_bounds[i]) Upper bound: $(blmo.upper_bounds[i]))") + if !( + blmo.lower_bounds[i] ≤ v[int_var] + 1e-6 || !(v[int_var] - 1e-6 ≤ blmo.upper_bounds[i]) + ) + @debug( + "Vertex entry: $(v[int_var]) Lower bound: $(blmo.lower_bounds[i]) Upper bound: $(blmo.upper_bounds[i]))" + ) return false end end @@ -186,13 +196,13 @@ end # Safety check only. function build_LMO_correct(blmo::ManagedBoundedLMO, node_bounds) for key in keys(node_bounds.lower_bounds) - idx = findfirst(x -> x== key, blmo.int_vars) - if idx === nothing || blmo.lower_bounds[idx] != node_bounds[key, :greaterthan] + idx = findfirst(x -> x == key, blmo.int_vars) + if idx === nothing || blmo.lower_bounds[idx] != node_bounds[key, :greaterthan] return false end end for key in keys(node_bounds.upper_bounds) - idx = findfirst(x -> x== key, blmo.int_vars) + idx = findfirst(x -> x == key, blmo.int_vars) if idx === nothing || blmo.upper_bounds[idx] != node_bounds[key, :lessthan] return false end @@ -202,7 +212,7 @@ end # Check whether a split is valid, i.e. the upper and lower on variable vidx are not the same. 
function is_valid_split(tree::Bonobo.BnBTree, blmo::ManagedBoundedLMO, vidx::Int) - idx = findfirst(x-> x==vidx, blmo.int_vars) + idx = findfirst(x -> x == vidx, blmo.int_vars) return blmo.lower_bounds[idx] != blmo.upper_bounds[idx] end @@ -226,9 +236,9 @@ function solve( branching_strategy=Bonobo.MOST_INFEASIBLE(), variant::FrankWolfeVariant=BPCG(), line_search::FrankWolfe.LineSearchMethod=FrankWolfe.Adaptive(), - active_set::Union{Nothing, FrankWolfe.ActiveSet} = nothing, + active_set::Union{Nothing,FrankWolfe.ActiveSet}=nothing, lazy=true, - lazy_tolerance = 2.0, + lazy_tolerance=2.0, fw_epsilon=1e-2, verbose=false, dual_gap=1e-6, @@ -246,15 +256,15 @@ function solve( global_dual_tightening=true, bnb_callback=nothing, strong_convexity=0.0, - domain_oracle= x->true, + domain_oracle=x -> true, start_solution=nothing, - fw_verbose = false, + fw_verbose=false, kwargs..., ) blmo = ManagedBoundedLMO(sblmo, lower_bounds, upper_bounds, n, int_vars) return solve( f, - grad!, + grad!, blmo, traverse_strategy=traverse_strategy, branching_strategy=branching_strategy, @@ -283,6 +293,6 @@ function solve( domain_oracle=domain_oracle, start_solution=start_solution, fw_verbose=fw_verbose, - kwargs... + kwargs..., ) end diff --git a/src/node.jl b/src/node.jl index 4942b0a71..f70d1ea2a 100644 --- a/src/node.jl +++ b/src/node.jl @@ -58,9 +58,10 @@ function Bonobo.get_branching_nodes_info(tree::Bonobo.BnBTree, node::FrankWolfeN # In case of strong convexity, check if a child can be pruned prune_left, prune_right = prune_children(tree, node, lower_bound_base, x, vidx) - + # Split active set - active_set_left, active_set_right = split_vertices_set!(node.active_set, tree, vidx, node.local_bounds) + active_set_left, active_set_right = + split_vertices_set!(node.active_set, tree, vidx, node.local_bounds) discarded_set_left, discarded_set_right = split_vertices_set!(node.discarded_vertices, tree, vidx, x, node.local_bounds) @@ -141,13 +142,13 @@ function Bonobo.get_branching_nodes_info(tree::Bonobo.BnBTree, node::FrankWolfeN x_right = FrankWolfe.compute_active_set_iterate!(active_set_right) domain_oracle = tree.root.options[:domain_oracle] - nodes = if !prune_left && !prune_right + nodes = if !prune_left && !prune_right [node_info_left, node_info_right] - elseif prune_left + elseif prune_left [node_info_right] - elseif prune_right + elseif prune_right [node_info_left] - elseif domain_oracle(x_right) + elseif domain_oracle(x_right) [node_info_right] elseif domain_oracle(x_left) [node_info_left] @@ -163,7 +164,7 @@ Use strong convexity to potentially remove one of the children nodes function prune_children(tree, node, lower_bound_base, x, vidx) prune_left = false prune_right = false - + μ = tree.root.options[:strong_convexity] if μ > 0 @debug "Using strong convexity $μ" @@ -171,13 +172,10 @@ function prune_children(tree, node, lower_bound_base, x, vidx) if vidx == j continue end - lower_bound_base += μ/2 * min( - (x[j] - floor(x[j]))^2, - (ceil(x[j]) - x[j])^2, - ) + lower_bound_base += μ / 2 * min((x[j] - floor(x[j]))^2, (ceil(x[j]) - x[j])^2) end - new_bound_left = lower_bound_base + μ/2 * (x[vidx] - floor(x[vidx]))^2 - new_bound_right = lower_bound_base + μ/2 * (ceil(x[vidx]) - x[vidx])^2 + new_bound_left = lower_bound_base + μ / 2 * (x[vidx] - floor(x[vidx]))^2 + new_bound_right = lower_bound_base + μ / 2 * (ceil(x[vidx]) - x[vidx])^2 if new_bound_left > tree.incumbent @debug "prune left, from $(node.lb) -> $new_bound_left, ub $(tree.incumbent), lb $(node.lb)" prune_left = true @@ -186,7 +184,10 @@ function 
prune_children(tree, node, lower_bound_base, x, vidx) @debug "prune right, from $(node.lb) -> $new_bound_right, ub $(tree.incumbent), lb $(node.lb)" prune_right = true end - @assert !((new_bound_left > tree.incumbent + tree.root.options[:dual_gap]) && (new_bound_right > tree.incumbent + tree.root.options[:dual_gap])) "both sides should not be pruned" + @assert !( + (new_bound_left > tree.incumbent + tree.root.options[:dual_gap]) && + (new_bound_right > tree.incumbent + tree.root.options[:dual_gap]) + ) "both sides should not be pruned" end # If both nodes are pruned, when one of them has to be equal to the incumbent. @@ -246,7 +247,7 @@ function Bonobo.evaluate_node!(tree::Bonobo.BnBTree, node::FrankWolfeNode) active_set = node.active_set x = FrankWolfe.compute_active_set_iterate!(node.active_set) @assert is_linear_feasible(tree.root.problem.tlmo, x) - for (_,v) in node.active_set + for (_, v) in node.active_set @assert is_linear_feasible(tree.root.problem.tlmo, v) end @@ -294,9 +295,9 @@ function Bonobo.evaluate_node!(tree::Bonobo.BnBTree, node::FrankWolfeNode) if is_integer_feasible(tree, x) node.ub = primal return lower_bound, primal - # Sanity check: If the incumbent is better than the lower bound of the root node - # and the root node is not integer feasible, something is off! - elseif node.id == 1 + # Sanity check: If the incumbent is better than the lower bound of the root node + # and the root node is not integer feasible, something is off! + elseif node.id == 1 @debug "Lower bound of root node: $(lower_bound)" @debug "Current incumbent: $(tree.incumbent)" @assert lower_bound <= tree.incumbent + 1e-5 @@ -328,7 +329,10 @@ function dual_tightening(tree, node, x, dual_gap) end gj = grad[j] safety_tolerance = 2.0 - rhs = tree.incumbent - tree.root.problem.f(x) + safety_tolerance * dual_gap + sqrt(eps(tree.incumbent)) + rhs = + tree.incumbent - tree.root.problem.f(x) + + safety_tolerance * dual_gap + + sqrt(eps(tree.incumbent)) if ≈(x[j], lb, atol=tree.options.atol, rtol=tree.options.rtol) if !isapprox(gj, 0, atol=1e-5) num_potential_tightenings += 1 @@ -337,9 +341,9 @@ function dual_tightening(tree, node, x, dual_gap) Mlb = 0 bound_tightened = true @debug "starting tightening ub $(rhs)" - while 0.99 * (Mlb * gj + μ/2 * Mlb^2) <= rhs + while 0.99 * (Mlb * gj + μ / 2 * Mlb^2) <= rhs Mlb += 1 - if lb + Mlb -1 == ub + if lb + Mlb - 1 == ub bound_tightened = false break end @@ -350,19 +354,20 @@ function dual_tightening(tree, node, x, dual_gap) node.local_bounds[j, :lessthan] = new_bound num_tightenings += 1 if haskey(tree.root.problem.integer_variable_bounds, (j, :lessthan)) - @assert node.local_bounds[j, :lessthan] <= tree.root.problem.integer_variable_bounds[j, :lessthan] + @assert node.local_bounds[j, :lessthan] <= + tree.root.problem.integer_variable_bounds[j, :lessthan] end end end - elseif ≈(x[j], ub, atol=tree.options.atol, rtol=tree.options.rtol) - if !isapprox(gj,0,atol=1e-5) + elseif ≈(x[j], ub, atol=tree.options.atol, rtol=tree.options.rtol) + if !isapprox(gj, 0, atol=1e-5) num_potential_tightenings += 1 end if gj < 0 Mub = 0 bound_tightened = true @debug "starting tightening lb $(rhs)" - while -0.99 * (Mub * gj + μ/2 * Mub^2) <= rhs + while -0.99 * (Mub * gj + μ / 2 * Mub^2) <= rhs Mub += 1 if ub - Mub + 1 == lb bound_tightened = false @@ -375,7 +380,8 @@ function dual_tightening(tree, node, x, dual_gap) node.local_bounds[j, :greaterthan] = new_bound num_tightenings += 1 if haskey(tree.root.problem.integer_variable_bounds, (j, :greaterthan)) - @assert node.local_bounds[j, 
:greaterthan] >= tree.root.problem.integer_variable_bounds[j, :greaterthan] + @assert node.local_bounds[j, :greaterthan] >= + tree.root.problem.integer_variable_bounds[j, :greaterthan] end end end @@ -433,7 +439,7 @@ function global_tightening(tree, node) lb = lb while Mlb * gj <= rhs Mlb += 1 - if lb + Mlb -1 == ub + if lb + Mlb - 1 == ub bound_tightened = false break end @@ -441,11 +447,11 @@ function global_tightening(tree, node) if bound_tightened new_bound = lb + Mlb - 1 @debug "found global UB tightening $ub -> $new_bound" - if haskey(tree.root.global_tightenings.upper_bounds,j) + if haskey(tree.root.global_tightenings.upper_bounds, j) if tree.root.global_tightenings.upper_bounds[j] != new_bound - num_tightenings +=1 + num_tightenings += 1 end - else + else num_tightenings += 1 end tree.root.global_tightenings.upper_bounds[j] = new_bound @@ -468,11 +474,11 @@ function global_tightening(tree, node) if bound_tightened new_bound = ub - Mub + 1 @debug "found global LB tightening $lb -> $new_bound" - if haskey(tree.root.global_tightenings.lower_bounds,j) + if haskey(tree.root.global_tightenings.lower_bounds, j) if tree.root.global_tightenings.lower_bounds[j] != new_bound - num_tightenings +=1 + num_tightenings += 1 end - else + else num_tightenings += 1 end tree.root.global_tightenings.lower_bounds[j] = new_bound @@ -494,8 +500,8 @@ function tightening_strong_convexity(tree, x, lower_bound) for j in tree.root.problem.integer_variables if x[j] > floor(x[j]) + 1e-6 && x[j] < ceil(x[j]) - 1e-6 num_fractional += 1 - new_left_increment = μ/2 * (x[j] - floor(x[j]))^2 - new_right_increment = μ/2 * (ceil(x[j]) - x[j])^2 + new_left_increment = μ / 2 * (x[j] - floor(x[j]))^2 + new_right_increment = μ / 2 * (ceil(x[j]) - x[j])^2 new_increment = min(new_left_increment, new_right_increment) strong_convexity_bound += new_increment end diff --git a/src/problem.jl b/src/problem.jl index 32ff39378..8d1479c72 100644 --- a/src/problem.jl +++ b/src/problem.jl @@ -18,12 +18,8 @@ s.t. x ∈ X (given by the LMO) x_j ∈ Z ∀ j in integer_variables ``` """ -mutable struct SimpleOptimizationProblem{ - F, - G, - TLMO<:TimeTrackingLMO, - IB<:IntegerBounds, -} <: AbstractSimpleOptimizationProblem +mutable struct SimpleOptimizationProblem{F,G,TLMO<:TimeTrackingLMO,IB<:IntegerBounds} <: + AbstractSimpleOptimizationProblem f::F g::G nvars::Int @@ -63,7 +59,8 @@ function is_integer_feasible( end function is_integer_feasible(tree::Bonobo.BnBTree, x::AbstractVector) - indicator_feasible = indicator_present(tree) ? is_indicator_feasible(tree.root.problem.tlmo.blmo.o, x) : true + indicator_feasible = + indicator_present(tree) ? is_indicator_feasible(tree.root.problem.tlmo.blmo.o, x) : true return is_integer_feasible( tree.root.problem.integer_variables, x; diff --git a/src/strong_branching.jl b/src/strong_branching.jl index 24860f7f9..c69546ba5 100644 --- a/src/strong_branching.jl +++ b/src/strong_branching.jl @@ -1,5 +1,6 @@ -struct PartialStrongBranching{BLMO<:BoundedLinearMinimizationOracle} <: Bonobo.AbstractBranchStrategy +struct PartialStrongBranching{BLMO<:BoundedLinearMinimizationOracle} <: + Bonobo.AbstractBranchStrategy max_iteration::Int solving_epsilon::Float64 bounded_lmo::BLMO @@ -131,8 +132,11 @@ end Hybrid between partial strong branching and another strategy. `perform_strong_branch(tree, node) -> Bool` decides whether to perform strong branching or not. 
""" -struct HybridStrongBranching{BLMO<:BoundedLinearMinimizationOracle,F<:Function,B<:Bonobo.AbstractBranchStrategy} <: - Bonobo.AbstractBranchStrategy +struct HybridStrongBranching{ + BLMO<:BoundedLinearMinimizationOracle, + F<:Function, + B<:Bonobo.AbstractBranchStrategy, +} <: Bonobo.AbstractBranchStrategy pstrong::PartialStrongBranching{BLMO} perform_strong_branch::F alternative_branching::B @@ -144,7 +148,7 @@ function HybridStrongBranching( bounded_lmo::BoundedLinearMinimizationOracle, perform_strong_branch::Function, alternative=Bonobo.MOST_INFEASIBLE(), -) +) return HybridStrongBranching( PartialStrongBranching(max_iteration, solving_epsilon, bounded_lmo), perform_strong_branch, @@ -174,7 +178,7 @@ function strong_up_to_depth( bounded_lmo::BoundedLinearMinimizationOracle, max_depth::Int, alternative=Bonobo.MOST_INFEASIBLE(), -) +) perform_strong_while_depth(_, node) = node.level <= max_depth return HybridStrongBranching( PartialStrongBranching(max_iteration, solving_epsilon, bounded_lmo), diff --git a/src/time_tracking_lmo.jl b/src/time_tracking_lmo.jl index 4e075e278..d9b50141c 100644 --- a/src/time_tracking_lmo.jl +++ b/src/time_tracking_lmo.jl @@ -16,7 +16,7 @@ end TimeTrackingLMO(blmo::BoundedLinearMinimizationOracle) = TimeTrackingLMO(blmo, Float64[], Int[], Int[], 0, Int[]) - + TimeTrackingLMO(blmo::BoundedLinearMinimizationOracle, int_vars) = TimeTrackingLMO(blmo, Float64[], Int[], Int[], 0, int_vars) diff --git a/src/utilities.jl b/src/utilities.jl index 49b4e12a0..4fefe82c9 100644 --- a/src/utilities.jl +++ b/src/utilities.jl @@ -143,9 +143,9 @@ end function is_bound_feasible(bounds::IntegerBounds, v; atol=1e-5) for (idx, set) in bounds.lower_bounds - if v[idx] < set - atol + if v[idx] < set - atol return false - end + end end for (idx, set) in bounds.upper_bounds if v[idx] > set + atol diff --git a/test/indicator_test.jl b/test/indicator_test.jl index b207bfe80..f865209e2 100644 --- a/test/indicator_test.jl +++ b/test/indicator_test.jl @@ -24,21 +24,27 @@ const MOIU = MOI.Utilities MOI.add_constraint(o, z[i], MOI.GreaterThan(0.0)) MOI.add_constraint(o, z[i], MOI.LessThan(1.0)) - MOI.add_constraint(o, z[i], MOI.ZeroOne()) - end + MOI.add_constraint(o, z[i], MOI.ZeroOne()) + end blmo = Boscia.MathOptBLMO(o) @test Boscia.indicator_present(blmo) == false for i in 1:n gl = MOI.VectorAffineFunction( - [ MOI.VectorAffineTerm(1, MOI.ScalarAffineTerm(1.0, z[i])), - MOI.VectorAffineTerm(2, MOI.ScalarAffineTerm(1.0, x[i])),], - [0.0, 0.0], ) + [ + MOI.VectorAffineTerm(1, MOI.ScalarAffineTerm(1.0, z[i])), + MOI.VectorAffineTerm(2, MOI.ScalarAffineTerm(1.0, x[i])), + ], + [0.0, 0.0], + ) gg = MOI.VectorAffineFunction( - [ MOI.VectorAffineTerm(1, MOI.ScalarAffineTerm(1.0, z[i])), - MOI.VectorAffineTerm(2, MOI.ScalarAffineTerm(-1.0, x[i])),], - [0.0, 0.0], ) + [ + MOI.VectorAffineTerm(1, MOI.ScalarAffineTerm(1.0, z[i])), + MOI.VectorAffineTerm(2, MOI.ScalarAffineTerm(-1.0, x[i])), + ], + [0.0, 0.0], + ) MOI.add_constraint(o, gl, MOI.Indicator{MOI.ACTIVATE_ON_ONE}(MOI.LessThan(0.0))) MOI.add_constraint(o, gg, MOI.Indicator{MOI.ACTIVATE_ON_ONE}(MOI.LessThan(0.0))) end @@ -50,7 +56,7 @@ const MOIU = MOI.Utilities if isapprox(x[n+i], 1.0) x[i] = 0.0 end - end + end end x = [0.5, 1.0, 0.75, 0.0, 0.9, 1.0, 1.0, 1.0, 0.0, 0.0] @@ -59,5 +65,5 @@ const MOIU = MOI.Utilities @test Boscia.is_indicator_feasible(blmo, x) == false @test Boscia.is_indicator_feasible(blmo, y) == true ind_rounding(x) - @test Boscia.is_indicator_feasible(blmo, x) == true + @test Boscia.is_indicator_feasible(blmo, x) == 
true end diff --git a/test/interface_test.jl b/test/interface_test.jl index 1375f1e8f..1b983431f 100644 --- a/test/interface_test.jl +++ b/test/interface_test.jl @@ -21,7 +21,7 @@ diffi = Random.rand(Bool, n) * 0.6 .+ 0.3 function grad!(storage, x) @. storage = x - diffi end - + function build_norm_lmo() o = SCIP.Optimizer() MOI.set(o, MOI.Silent(), true) @@ -34,9 +34,18 @@ diffi = Random.rand(Bool, n) * 0.6 .+ 0.3 end return FrankWolfe.MathOptLMO(o) end - x_baseline, _, result = Boscia.solve(f, grad!, build_norm_lmo(), verbose=false, dual_tightening=false) - x_tighten, _, result = Boscia.solve(f, grad!, build_norm_lmo(), verbose=false, dual_tightening=true) - x_strong, _, result = Boscia.solve(f, grad!, build_norm_lmo(), verbose=false, dual_tightening=true, strong_convexity=1.0) + x_baseline, _, result = + Boscia.solve(f, grad!, build_norm_lmo(), verbose=false, dual_tightening=false) + x_tighten, _, result = + Boscia.solve(f, grad!, build_norm_lmo(), verbose=false, dual_tightening=true) + x_strong, _, result = Boscia.solve( + f, + grad!, + build_norm_lmo(), + verbose=false, + dual_tightening=true, + strong_convexity=1.0, + ) @test x_baseline == round.(diffi) @test f(x_tighten) == f(result[:raw_solution]) @@ -67,7 +76,7 @@ end branching_strategy = Boscia.PartialStrongBranching(10, 1e-3, blmo) MOI.set(branching_strategy.bounded_lmo.o, MOI.Silent(), true) - x, _, result = Boscia.solve(f, grad!, lmo, verbose=false, branching_strategy = branching_strategy) + x, _, result = Boscia.solve(f, grad!, lmo, verbose=false, branching_strategy=branching_strategy) @test x == round.(diffi) @test f(x) == f(result[:raw_solution]) @@ -200,16 +209,19 @@ diffi = Random.rand(Bool, n) * 0.6 .+ 0.3 end lmo = build_model() - x_afw, _, result_afw = Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.AwayFrankWolfe()) + x_afw, _, result_afw = + Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.AwayFrankWolfe()) lmo = build_model() - x_blended, _, result_blended = Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.Blended()) + x_blended, _, result_blended = + Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.Blended()) lmo = build_model() x_bpcg, _, result_bpcg = Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.BPCG()) lmo = build_model() - x_vfw, _, result_vfw = Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.VanillaFrankWolfe()) + x_vfw, _, result_vfw = + Boscia.solve(f, grad!, lmo, verbose=false, variant=Boscia.VanillaFrankWolfe()) @test isapprox(f(x_afw), f(result_afw[:raw_solution]), atol=1e-6, rtol=1e-3) @test isapprox(f(x_blended), f(result_blended[:raw_solution]), atol=1e-6, rtol=1e-3) @@ -248,15 +260,18 @@ end lmo = build_model() line_search = FrankWolfe.Adaptive() - x_adaptive, _, result_adaptive = Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search) + x_adaptive, _, result_adaptive = + Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search) lmo = build_model() line_search = FrankWolfe.MonotonicStepSize() - x_monotonic, _, result_monotonic = Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search, time_limit=120) + x_monotonic, _, result_monotonic = + Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search, time_limit=120) lmo = build_model() line_search = FrankWolfe.Agnostic() - x_agnostic, _, result_agnostic = Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search, time_limit=120) + x_agnostic, _, result_agnostic = + Boscia.solve(f, grad!, lmo, verbose=false, line_search=line_search, 
diff --git a/test/mean_risk.jl b/test/mean_risk.jl
index 67963e035..b7495ceef 100644
--- a/test/mean_risk.jl
+++ b/test/mean_risk.jl
@@ -59,8 +59,8 @@ const M1 = (A1 + A1') / 2
         return storage
     end
 
-    x,_,result = Boscia.solve(f, grad!, lmo, verbose= true, time_limit=300)
-    
-    @test dot(a,x) <= b + 1e-4
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, time_limit=300)
+
+    @test dot(a, x) <= b + 1e-4
     @test f(x) <= f(result[:raw_solution]) + 1e-6
 end
diff --git a/test/poisson.jl b/test/poisson.jl
index ac80c79cb..717be4ea6 100644
--- a/test/poisson.jl
+++ b/test/poisson.jl
@@ -22,13 +22,13 @@ n0 = 30
 p = n0
 
 # underlying true weights
-const w0 = rand(Float64, p) 
+const w0 = rand(Float64, p)
 # set 50 entries to 0
 for _ in 1:20
     w0[rand(1:p)] = 0
 end
-const b0 = rand(Float64) 
-const X0 = rand(Float64, n0, p) 
+const b0 = rand(Float64)
+const X0 = rand(Float64, n0, p)
 const y0 = map(1:n0) do idx
     a = dot(X0[idx, :], w0) + b0
     return rand(Distributions.Poisson(exp(a)))
@@ -56,7 +56,7 @@ N = 1.0
     MOI.add_constraint(o, b, MOI.LessThan(N))
     MOI.add_constraint(o, b, MOI.GreaterThan(-N))
     lmo = FrankWolfe.MathOptLMO(o)
-    
+
     α = 1.3
     function f(θ)
         w = @view(θ[1:p])
@@ -84,7 +84,7 @@ N = 1.0
         return storage
     end
 
-    x, _, result = Boscia.solve(f, grad!, lmo, verbose = true, time_limit=500)
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, time_limit=500)
 
     @test f(x) <= f(result[:raw_solution]) + 1e-6
     @test sum(x[p+1:2p]) <= k
@@ -157,8 +157,8 @@ end
     blmo = Boscia.MathOptBLMO(HiGHS.Optimizer())
     branching_strategy = Boscia.PartialStrongBranching(10, 1e-3, blmo)
     MOI.set(branching_strategy.bounded_lmo.o, MOI.Silent(), true)
-    
-    x, _, result = Boscia.solve(f, grad!, lmo, verbose = true, branching_strategy = branching_strategy)
+
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, branching_strategy=branching_strategy)
     @test sum(x[p+1:2p]) <= k
     @test f(x) <= f(result[:raw_solution]) + 1e-6
     @test sum(x[p+1:2p]) <= k
@@ -256,7 +256,7 @@ push!(groups, ((k-1)*group_size+1):pg)
         storage ./= norm(storage)
        return storage
     end
-    
+
     x, _, result = Boscia.solve(f, grad!, lmo, verbose=true)
     @test f(x) <= f(result[:raw_solution]) + 1e-6
     @test sum(x[p+1:2pg]) <= k
@@ -334,8 +334,7 @@ end
     branching_strategy = Boscia.PartialStrongBranching(10, 1e-3, blmo)
     MOI.set(branching_strategy.bounded_lmo.o, MOI.Silent(), true)
 
-    x, _, result =
-        Boscia.solve(f, grad!, lmo, verbose=true, branching_strategy=branching_strategy)
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, branching_strategy=branching_strategy)
 
     @test f(x) <= f(result[:raw_solution]) + 1e-6
     @test sum(x[p+1:2pg]) <= k
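# Reviewer sketch (not part of the diff): the branching strategies reformatted in
# src/strong_branching above and exercised in test/runtests.jl below can be wired up as
# follows. `lmo`, `f`, and `grad!` are assumed to come from a model like the ones in the
# tests; the leading arguments of strong_up_to_depth (max_iteration, solving_epsilon) and
# its qualification as Boscia.strong_up_to_depth are assumptions inferred from the
# HybridStrongBranching hunk, not confirmed by this diff.
using Boscia, HiGHS
import MathOptInterface as MOI

blmo = Boscia.MathOptBLMO(HiGHS.Optimizer())

# Partial strong branching: score candidates with at most 10 FW iterations at tolerance 1e-3.
partial = Boscia.PartialStrongBranching(10, 1e-3, blmo)
MOI.set(partial.bounded_lmo.o, MOI.Silent(), true)

# Hybrid strong branching: strong-branch only while the callback returns true, then fall
# back to the alternative strategy (Bonobo.MOST_INFEASIBLE by default).
perform_strong_branch(tree, node) =
    node.level <= length(tree.root.problem.integer_variables) / 3
hybrid = Boscia.HybridStrongBranching(10, 1e-3, blmo, perform_strong_branch)
MOI.set(hybrid.pstrong.bounded_lmo.o, MOI.Silent(), true)

# Convenience wrapper from the hunk above: strong branching up to a fixed tree depth.
depth_limited = Boscia.strong_up_to_depth(10, 1e-3, blmo, 3)

# Any of these is passed through the branching_strategy keyword, e.g.:
# x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, branching_strategy=hybrid)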
diff --git a/test/runtests.jl b/test/runtests.jl
index 2f4144d87..8df50d075 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -27,7 +27,7 @@ include("interface_test.jl")
         MOI.add_constraint(o, xi, MOI.LessThan(1.0))
     end
     MOI.add_constraint(o, 1.0x[1] + 1.0x[2], MOI.LessThan(1.5))
-    @test Boscia.is_linear_feasible(o, 2*ones(n)) == false
+    @test Boscia.is_linear_feasible(o, 2 * ones(n)) == false
     @test Boscia.is_linear_feasible(o, vcat([1.0, 0.5], ones(n - 2)))
     @test Boscia.is_linear_feasible(o, vcat([0.5, 0.5], ones(n - 2)))
     @test Boscia.is_linear_feasible(o, vcat([0.0, 0.0], ones(n - 2)))
@@ -45,7 +45,7 @@ include("poisson.jl")
 include("mean_risk.jl")
 include("time_limit.jl")
 
-n= 10
+n = 10
 const diff1 = rand(Bool, n) * 0.8 .+ 1.1
 @testset "Strong branching" begin
     function f(x)
@@ -77,7 +77,7 @@ const diff1 = rand(Bool, n) * 0.8 .+ 1.1
         MOI.GreaterThan(lb),
     )
     lmo = FrankWolfe.MathOptLMO(o)
-    
+
     blmo = Boscia.MathOptBLMO(HiGHS.Optimizer())
     branching_strategy = Boscia.PartialStrongBranching(10, 1e-3, blmo)
     MOI.set(branching_strategy.bounded_lmo.o, MOI.Silent(), true)
@@ -118,8 +118,8 @@ end
         MOI.GreaterThan(lb),
     )
     lmo = FrankWolfe.MathOptLMO(o)
-    
-    
+
+
     function perform_strong_branch(tree, node)
         return node.level <= length(tree.root.problem.integer_variables) / 3
     end
@@ -127,8 +127,8 @@ end
     branching_strategy = Boscia.HybridStrongBranching(10, 1e-3, blmo, perform_strong_branch)
     MOI.set(branching_strategy.pstrong.bounded_lmo.o, MOI.Silent(), true)
 
-    x,_,result = Boscia.solve(f, grad!, lmo, verbose = true, branching_strategy=branching_strategy)
-    
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, branching_strategy=branching_strategy)
+
     @test isapprox(x, round.(diff1), atol=1e-5, rtol=1e-5)
 end
diff --git a/test/sparse_regression.jl b/test/sparse_regression.jl
index 241c3fb45..8e292583c 100644
--- a/test/sparse_regression.jl
+++ b/test/sparse_regression.jl
@@ -45,7 +45,7 @@ const M = 2 * var(A)
             MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.([1.0, -M], [x[i], x[i+p]]), 0.0),
             MOI.LessThan(0.0),
         )
-    end 
+    end
     MOI.add_constraint(
         o,
         MOI.ScalarAffineFunction(MOI.ScalarAffineTerm.(ones(p), x[p+1:2p]), 0.0),
@@ -64,7 +64,7 @@ const M = 2 * var(A)
         return storage
     end
 
-    x, _, result = Boscia.solve(f, grad!, lmo, verbose = true, time_limit=100)
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, time_limit=100)
     # println("Solution: $(x[1:p])")
     @test sum(x[1+p:2p]) <= k
     @test f(x) <= f(result[:raw_solution]) + 1e-6
@@ -129,8 +129,8 @@ end
 @testset "Sparse Regression Group" begin
     function f(x)
         return sum(abs2, y_g - A_g * x[1:p]) +
-            lambda_0_g * norm(x[p+1:2p])^2 +
-            lambda_2_g * norm(x[1:p])^2
+               lambda_0_g * norm(x[p+1:2p])^2 +
+               lambda_2_g * norm(x[1:p])^2
     end
     function grad!(storage, x)
         storage .= vcat(
@@ -139,9 +139,9 @@ end
         )
         return storage
     end
-    
+
     lmo = build_sparse_lmo_grouped()
-    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, fw_epsilon = 1e-3)
+    x, _, result = Boscia.solve(f, grad!, lmo, verbose=true, fw_epsilon=1e-3)
 
     @test sum(x[p+1:2p]) <= k
     for i in 1:k_int
@@ -158,7 +158,7 @@ end
     μ = 2lambda_0_g
 
     lmo = build_sparse_lmo_grouped()
-    x2, _, result2 = Boscia.solve(f, grad!, lmo, verbose=true, fw_epsilon = 1e-3, strong_convexity=μ)
+    x2, _, result2 = Boscia.solve(f, grad!, lmo, verbose=true, fw_epsilon=1e-3, strong_convexity=μ)
 
     @test sum(x2[p+1:2p]) <= k
     for i in 1:k_int
         @test sum(x2[groups[i]]) >= 1