
Commit

add existing code
tylerhanks committed Jul 19, 2023
1 parent 67fde5d commit fc250ab
Showing 7 changed files with 214 additions and 12 deletions.
2 changes: 2 additions & 0 deletions Project.toml
@@ -5,7 +5,9 @@ authors = ["Tyler Hanks <[email protected]>"]
version = "0.0.1"

[deps]
AlgebraicDynamics = "5fd6ff03-a254-427e-8840-ba658f502e32"
Catlab = "134e5e36-593f-5add-ad60-77f754baafbe"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"

[compat]
Catlab = "^0.15"
10 changes: 1 addition & 9 deletions src/AlgebraicOptimization.jl
@@ -2,14 +2,6 @@
"""
module AlgebraicOptimization

export hello

using Catlab

""" hello(name::String)
Returns the string "Hello, <name>!" where `<name>` is replaced with the provided parameter
"""
hello(name::String) = string("Hello, ", name, "!")
include("OpenProblems.jl")

end
141 changes: 141 additions & 0 deletions src/OpenProblems.jl
@@ -0,0 +1,141 @@
""" Define the UWD algebra of open optimization problems
"""
module OpenProblems

export OpenProblem, nvars, n_exposed_vars, portmap, objective, gradient_flow

using Catlab
using AlgebraicDynamics.UWDDynam
using ForwardDiff
import Catlab.oapply

# Open Problems
###############

""" OpenProblem
An open optimization problem R^`vars` -> R. The problem exposes `exposed_vars` variables
given by the portmap `p`. The function `objective` should take `Vector{Float64}` of length
`vars` as input and return a scalar.
"""
struct OpenProblem
exposed_vars::Int
vars::Int
objective::Function
p::FinFunction
OpenProblem(evs, vs, obj, p) = dom(p) != FinSet(evs) || codom(p) != FinSet(vs) ?
error("Invalid portmap") : new(evs, vs, obj, p)
end
# Convenience constructor when `vars`==`exposed_vars` and `p` is the identity function
OpenProblem(nvars, obj) = OpenProblem(nvars,nvars,obj,FinFunction(1:nvars))
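# Illustrative usage sketch (hypothetical, not part of this commit): with the definitions
# above, an open problem on two variables with the identity portmap could be written as
#   p = OpenProblem(2, x -> x[1]^2 + x[2]^2)
# while OpenProblem(1, 2, x -> x[1]^2 + x[2]^2, FinFunction([2], 2)) would expose only
# the second variable through its single port.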

# Make `OpenProblem`s callable
(p::OpenProblem)(z::Vector) = p.objective(z)

nvars(p::OpenProblem) = p.vars
n_exposed_vars(p::OpenProblem) = p.exposed_vars
objective(p::OpenProblem) = p.objective
portmap(p::OpenProblem) = p.p

# Open problems as a UWD algebra
################################

""" fills(d::AbstractUWD, b::Int, p::OpenProblem)
Checks if `p` is of the correct signature to fill box `b` of the uwd `d`
"""
fills(d::AbstractUWD, b::Int, p::OpenProblem) =
    b <= nparts(d, :Box) ? length(incident(d, b, :box)) == n_exposed_vars(p) :
        error("Trying to fill box $b when $d has fewer than $b boxes")

### oapply helper functions

induced_ports(d::AbstractUWD) = nparts(d, :OuterPort)
induced_ports(d::RelationDiagram) = subpart(d, [:outer_junction, :variable])

# Returns the pushout which induces the new set of variables
function induced_vars(d::AbstractUWD, ps::Vector{OpenProblem}, inclusions::Function)
    for b in parts(d, :Box)
        fills(d, b, ps[b]) || error("$(ps[b]) does not fill box $b")
    end

    total_portmap = copair([compose(portmap(ps[i]), inclusions(i)) for i in 1:length(ps)])

    #return pushout(FinFunction(subpart(d, :junction), nparts(d, :Junction)), total_portmap)
    return pushout(total_portmap, FinFunction(subpart(d, :junction), nparts(d, :Junction)))
end
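# The apex of this pushout is the composite variable set: variables from different boxes
# that are attached to the same junction of `d` get identified.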

# Takes a FinFunction from N->M and returns the induced linear map R^M->R^N
function induced_matrix(dom::Int, codom::Int, f::Vector{Int})::Matrix{Float64}
    length(f) == dom && max(f...) <= codom || error("Invalid FinFunction.")
    res = zeros(dom, codom)
    for (i,j) in Iterators.product(1:dom, 1:codom)
        if f[i] == j
            res[i,j] = 1
        end
    end
    return res
end

induced_matrix(f::FinFunction) = induced_matrix(length(dom(f)), length(codom(f)), f.func)
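# For example, under the definitions above, a FinFunction [2,3] from FinSet(2) to FinSet(3)
# induces the 2x3 matrix [0 1 0; 0 0 1], so multiplying a vector z in R^3 by it picks out
# [z[2], z[3]].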

# Sums objective functions of `ps` subject to correct variable sharing according to `d`.
# `var_map` should be the left leg of the induced variable pushout.
# `inclusions` should be a function mapping box numbers to the inclusion of that box's variables
# into the disjoint union of all the boxes' variables.
function induced_objective(d::AbstractUWD, ps::Vector{OpenProblem}, var_map::FinFunction, inclusions::Function)
    proj_mats = Matrix[]
    for b in parts(d, :Box)
        inc = compose(inclusions(b), var_map)
        push!(proj_mats, induced_matrix(inc))
    end
    return z::Vector -> sum([ps[b](proj_mats[b]*z) for b in 1:length(ps)])
end

""" oapply(d::AbstractUWD, ps::Vector{OpenProblem})
Implements the UWD algebra of open optimization problems. Given a composition pattern (implemented
by an undirected wiring diagram `d`) and open subproblems `ps` returns the composite
open optimization problem.
Each box of `d` must be filled by a problem of the appropriate type signature.
"""
function oapply(d::AbstractUWD, ps::Vector{OpenProblem})
    # Check that the number of problems provided matches the number of boxes in the UWD
    nboxes(d) == length(ps) || error("Number of problems does not match number of boxes.")
    # Ensure that each problem fills its associated box
    for i in 1:nboxes(d)
        fills(d, i, ps[i]) || error("Problem $i doesn't fill Box $i")
    end

    M = coproduct((FinSet∘nvars).(ps))
    inclusions(b::Int) = legs(M)[b]

    Mpo = induced_vars(d, ps, inclusions)
    #println(typeof(Mpo))

    obj = induced_objective(d, ps, legs(Mpo)[1], inclusions)

    junction_map = legs(Mpo)[2]
    outer_junction_map = FinFunction(subpart(d, :outer_junction), nparts(d, :Junction))
    return OpenProblem(
        length(induced_ports(d)),
        length(apex(Mpo)),
        obj,
        compose(outer_junction_map, junction_map)
    )
end

# Gradient flow as an algebra morphism
######################################

gradient_flow(p::OpenProblem) = ContinuousResourceSharer{Float64}(
    n_exposed_vars(p),
    nvars(p),
    (x,_,_) -> -ForwardDiff.gradient(objective(p), x),
    portmap(p).func
)
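# Illustrative sketch (mirroring the tests in this commit): the resulting resource sharer
# can be discretized and stepped with AlgebraicDynamics, e.g.
#   rs = gradient_flow(OpenProblem(2, x -> x[1]^2 + x[2]^2))
#   ds = euler_approx(rs, 0.01)           # explicit Euler with step size 0.01
#   x1 = eval_dynamics(ds, [1.0, 1.0])    # one gradient step from [1.0, 1.0]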

gradient_flow(ps::Vector{OpenProblem}) = map(p->gradient_flow(p), ps)

end # module
65 changes: 65 additions & 0 deletions test/OpenProblems.jl
@@ -0,0 +1,65 @@
using AlgebraicOptimization.OpenProblems

using Catlab
using AlgebraicDynamics.UWDDynam
using Test
using ForwardDiff

d = @relation (x,y,z) begin
    f(x,w)
    g(y,w)
    h(z,w)
end
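# The pattern d exposes x, y, and z as outer ports; the inner variable w is shared by
# all three boxes f, g, and h.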

A = [1 2; 3 4]

f(x) = x[1]^2 + x[2]
g(y) = y'*A*y - [2,1]'*y
h(z) = z[1]^2 + z[2]^2


open_f = OpenProblem(2,f)
open_g = OpenProblem(2,g)
open_h = OpenProblem(2,h)

cp = oapply(d, [open_f,open_g,open_h])
true_cp(u) = f([u[1],u[2]]) + g([u[3],u[2]]) + h([u[4],u[2]])
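# Reading off true_cp: u[2] plays the role of the shared variable w, while u[1], u[3],
# and u[4] correspond to the exposed variables x, y, and z.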

test_u = rand(4)*10
#test_u = Float64[3,2,1,2]
@test cp(test_u) == true_cp(test_u)

@time cp(test_u)
@time true_cp(test_u)

@test ForwardDiff.gradient(objective(cp), test_u) == ForwardDiff.gradient(true_cp, test_u)

@time ForwardDiff.gradient(objective(cp), test_u)
@time ForwardDiff.gradient(true_cp, test_u)

# Test naturality of gradient flow

function iterate(f, x0, num_iters)
    x = x0
    for i in 1:num_iters
        x = f(x)
    end
    return x
end


γ = 0.01
x0 = repeat([100], 4)
num_iters=100

gf1 = oapply(d, gradient_flow([open_f, open_g, open_h]))
gd1 = euler_approx(gf1, γ)
#sol1 = trajectory(gd1, test_u, nothing, tspan)
sol1 = iterate(x -> eval_dynamics(gd1, x), x0, num_iters)

gf2 = gradient_flow(cp)
gd2 = euler_approx(gf2, γ)
#sol2 = trajectory(gd2, test_u, nothing, tspan)
sol2 = iterate(x -> eval_dynamics(gd2, x), x0, num_iters)

@test sol1 == sol2
3 changes: 3 additions & 0 deletions test/Project.toml
@@ -1,3 +1,6 @@
[deps]
AlgebraicOptimization = "a72ceada-00ec-4ad9-90d3-37b40eaed052"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
AlgebraicDynamics = "5fd6ff03-a254-427e-8840-ba658f502e32"
Catlab = "134e5e36-593f-5add-ad60-77f754baafbe"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
1 change: 0 additions & 1 deletion test/core.jl

This file was deleted.

4 changes: 2 additions & 2 deletions test/runtests.jl
@@ -2,6 +2,6 @@ using Test

using AlgebraicOptimization

@testset "Core" begin
include("core.jl")
@testset "Open Optimization Problems" begin
include("OpenProblems.jl")
end
