Skip to content

Commit

Permalink
Add CBLS solver for ICNs (#7)
Browse files Browse the repository at this point in the history
* Temp save

* Fix for cbls icn

* Add CBLS solver for ICNs. Tag new version

* Update CI
  • Loading branch information
Azzaare authored Jan 19, 2023
1 parent b9a49df commit 06ddd31
Show file tree
Hide file tree
Showing 9 changed files with 123 additions and 33 deletions.
4 changes: 1 addition & 3 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,7 @@ jobs:
fail-fast: false
matrix:
version:
# - '1.6'
- '1.7'
- "^1.8.0-0"
- "1.8"
- 'nightly'
os:
- ubuntu-latest
Expand Down
5 changes: 4 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,16 +1,19 @@
name = "ConstraintLearning"
uuid = "4bd09668-9077-4be7-adc9-6307a490e6df"
authors = ["azzaare <jf@baffier.fr> and contributors"]
version = "0.1.3"
version = "0.1.4"

[deps]
CompositionalNetworks = "4b67e4b5-442d-4ef5-b760-3f5df3a57537"
ConstraintDomains = "5800fd60-8556-4464-8d61-84ebf7a0bedb"
Constraints = "30f324ab-b02d-43f0-b619-e131c61659f7"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Dictionaries = "85a47980-9c8c-11e8-2b9f-f7ca1fa99fb4"
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
LocalSearchSolvers = "2b10edaa-728d-4283-ac71-07e312d6ccf3"
Memoization = "6fafb56a-5788-4b4e-91ca-c0cea6611c73"
PatternFolds = "c18a7f1d-76ad-4ce4-950d-5419b888513b"
PrettyTables = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
QUBOConstraints = "110b7fcc-6942-4061-a625-86e03d1de7cf"
ThreadPools = "b189fb0b-2eb5-4ed4-bc0c-d34c51242431"
Expand Down
8 changes: 7 additions & 1 deletion src/ConstraintLearning.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ module ConstraintLearning

# SECTION - imports
using ConstraintDomains
using Constraints
using LocalSearchSolvers

using CompositionalNetworks
using Dictionaries
Expand All @@ -17,12 +19,15 @@ using PrettyTables
import Flux.Optimise: update!
import Flux: params

# SECTION - usings
import CompositionalNetworks: exclu, nbits_exclu, nbits, layers, compose, as_int

# SECTION - exports
export icn
export qubo

export ICNConfig
export ICNGeneticOptimizer
export ICNLocalSearchOptimizer
export ICNOptimizer

export QUBOGradientOptimizer
Expand All @@ -34,6 +39,7 @@ include("common.jl")
# SECTION - ICN
include("icn/base.jl")
include("icn/genetic.jl")
include("icn/cbls.jl")
include("icn.jl")

# SECTION - QUBO
Expand Down
8 changes: 4 additions & 4 deletions src/common.jl
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ function make_training_sets(X, penalty, p, ds)
end

# REVIEW - Is it correct? Make a CI test
function make_training_sets(X, penalty::Vector{T}, _, _) where {T <: Real}
function make_training_sets(X, penalty::Vector{T}, _) where {T <: Real}
solutions = Set{Vector{Int}}()
non_sltns = Set{Vector{Int}}()

Expand All @@ -46,10 +46,10 @@ function make_set_penalty(X, X̅)
return X_train, penalty
end

make_set_penalty(X, X̅, _, ::Nothing) = make_set_penalty(X, X̅)
make_set_penalty(X, X̅, ::Nothing) = make_set_penalty(X, X̅)

function make_set_penalty(X, X̅, param, icn_conf)
penalty = icn(X, X̅; param, metric = icn_conf.metric, optimizer = icn.optimizer)
function make_set_penalty(X, X̅, icn_conf; parameters...)
penalty = icn(X, X̅; metric = icn_conf.metric, optimizer = icn.optimizer, parameters...)
X_train = union(X, X̅)
return X_train, penalty
end
36 changes: 22 additions & 14 deletions src/icn.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,45 +5,53 @@ function icn(
dom_size = δ(Iterators.flatten(X), Iterators.flatten(X̅); discrete),
metric = :hamming,
optimizer = ICNGeneticOptimizer(),
param = nothing,
X_test = nothing,
parameters...
)
lc = learn_compose(
X,
X̅,
dom_size,
param;
dom_size;
metric,
optimizer,
X_test,
parameters...
)[1]
return composition(lc)
end

function icn(
domains::Vector{D},
penalty::F;
param = nothing,
configurations = explore(domains, penalty; param),
configurations = nothing,
discrete = true,
dom_size = δ(
Iterators.flatten(configurations[1]),
Iterators.flatten(configurations[2]);
discrete,
),
dom_size = nothing,
metric=:hamming,
optimizer = ICNGeneticOptimizer(),
X_test = nothing,
parameters...
) where {D <: AbstractDomain, F <: Function}
if isnothing(configurations)
configurations = explore(domains, penalty; parameters...)
end

if isnothing(dom_size)
dom_size = δ(
Iterators.flatten(configurations[1]),
Iterators.flatten(configurations[2]);
discrete,
)
end

return icn(
configurations[1],
configurations[2];
param,
discrete,
dom_size,
metric,
optimizer,
X_test,
parameters...
)
end

Expand All @@ -54,18 +62,18 @@ function icn(
dom_size = δ(Iterators.flatten(X); discrete),
metric = :hamming,
optimizer = ICNGeneticOptimizer(),
param = nothing,
X_test = nothing,
parameters...
) where {F <: Function}
solutions, non_sltns = make_training_sets(X, penalty, param, dom_size)
solutions, non_sltns = make_training_sets(X, penalty, dom_size; parameters...)
return icn(
solutions,
non_sltns;
param,
discrete,
dom_size,
metric,
optimizer,
X_test,
parameters...
)
end
69 changes: 69 additions & 0 deletions src/icn/cbls.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# Optimizer that learns ICN weights with the CBLS (constraint-based local
# search) solver from LocalSearchSolvers.jl, as an alternative to the
# genetic optimizer. It only stores the solver options forwarded to `solver`.
struct ICNLocalSearchOptimizer <: ICNOptimizer
# Options passed verbatim to `LocalSearchSolvers.solver` (iteration budget,
# time limit, etc.). See LocalSearchSolvers.Options for available fields.
options::LocalSearchSolvers.Options

# Default-construct with the solver's default options.
ICNLocalSearchOptimizer(options = LocalSearchSolvers.Options()) = new(options)
end

"""
    mutually_exclusive(layer, w)

Error function for an exclusive ICN layer: return `0.0` when the bit
configuration `w` encodes exactly one valid operation of `layer`, and a
positive penalty otherwise. `w` is decoded to an integer via `as_int`;
`0` (no operation selected) costs `1.0`, and values beyond the number of
operations in the layer cost their overshoot.
"""
function mutually_exclusive(layer, w)
    encoded = as_int(w)
    if iszero(encoded)
        return 1.0
    end
    return max(0.0, encoded - length(layer))
end

"""
    no_empty_layer(x; X = nothing)

Error function for a non-exclusive ICN layer: `0` when at least one bit of
`x` is set (the layer activates at least one operation), `1` otherwise.
The keyword `X` is unused; it only matches the constraint call signature.
"""
function no_empty_layer(x; X = nothing)
    return max(0, 1 - sum(x))
end

"""
    parameter_specific_operations(x; X = nothing)

Placeholder error function for parameter-specific constraints; currently a
no-op that always returns `0.0` (no penalty). The keyword `X` is unused and
only kept to match the constraint call signature.
"""
function parameter_specific_operations(x; X = nothing)
    return 0.0
end

"""
    CompositionalNetworks.optimize!(icn, solutions, non_sltns, dom_size, metric, optimizer::ICNLocalSearchOptimizer; parameters...)

Learn the weights of `icn` with a CBLS solver. Builds a boolean model with
one variable per ICN weight bit, one structural constraint per layer
(exactly-one operation for exclusive layers, at-least-one otherwise), and a
fitness objective measuring how far the composed function's output is from
`metric` over `solutions` and `non_sltns` (plus regularization and a weights
bias). Mutates `icn` by writing the best weights found, and returns the best
`BitVector` together with a singleton `Dictionary{BitVector, Int}` so the
return shape matches the genetic optimizer.
"""
function CompositionalNetworks.optimize!(
    icn, solutions, non_sltns, dom_size, metric, optimizer::ICNLocalSearchOptimizer;
    parameters...
)
    @debug "starting debug opt"
    m = model(; kind = :icn)
    n = nbits(icn)

    # Every ICN weight bit is a boolean decision variable.
    d = domain([false, true])
    foreach(_ -> variable!(m, d), 1:n)

    # One constraint per layer over that layer's slice of the weight vector.
    start = 1
    for layer in layers(icn)
        if exclu(layer)
            # Exclusive layer: the bits encode exactly one operation index.
            stop = start + nbits_exclu(layer) - 1
            # `layer` is a fresh binding each iteration, so each closure
            # captures its own layer.
            f(x; X = nothing) = mutually_exclusive(layer, x)
            constraint!(m, f, start:stop)
        else
            # Non-exclusive layer: at least one operation must be active.
            stop = start + length(layer) - 1
            constraint!(m, no_empty_layer, start:stop)
        end
        start = stop + 1
    end

    # Buffer reused by each composition evaluation to avoid reallocating.
    inplace = zeros(dom_size, max_icn_length())

    # Resolve the metric symbol once, outside the fitness closure:
    # `eval(metric)` performs a global-scope lookup and would otherwise run
    # on every fitness evaluation in the solver's hot loop. This also matches
    # the genetic optimizer, which evaluates the metric symbol once up front.
    metric_function = eval(metric)

    function fitness(w)
        _w = BitVector(w)
        compo = compose(icn, _w)
        f = composition(compo)
        S = Iterators.flatten((solutions, non_sltns))
        @debug _w compo f S metric
        σ = sum(
            x -> abs(f(x; X = inplace, dom_size, parameters...) - metric_function(x, solutions)),
            S,
        )
        # NOTE: `weigths_bias` spelling follows the CompositionalNetworks API.
        return σ + regularization(icn) + weigths_bias(_w)
    end

    objective!(m, fitness)

    # Run the CBLS solver with the user-provided options.
    s = solver(m; options = optimizer.options)
    solve!(s)
    @debug "pool" s.pool best_values(s.pool) best_values(s) s.pool.configurations

    # Write the best weights back into the ICN and return them with a
    # singleton result dictionary (same shape as the genetic optimizer).
    best = BitVector(collect(best_values(s)))
    weigths!(icn, best)

    return best, Dictionary{BitVector, Int}([best], [1])
end
21 changes: 12 additions & 9 deletions src/icn/genetic.jl
Original file line number Diff line number Diff line change
Expand Up @@ -17,12 +17,12 @@ function _optimize!(
solutions,
non_sltns,
dom_size,
param,
metric,
pop_size,
iterations;
samples=nothing,
memoize=false,
parameters...
)
inplace = zeros(dom_size, max_icn_length())
_non_sltns = isnothing(samples) ? non_sltns : rand(non_sltns, samples)
Expand All @@ -31,10 +31,12 @@ function _optimize!(
compo = compose(icn, w)
f = composition(compo)
S = Iterators.flatten((solutions, _non_sltns))
return sum(x -> abs(f(x; X=inplace, param, dom_size) - metric(x, solutions)), S) +
regularization(icn) +
weigths_bias(w)
σ = sum(
x -> abs(f(x; X=inplace, dom_size, parameters...) - metric(x, solutions)), S
)
return σ + regularization(icn) + weigths_bias(w)
end

_fitness = memoize ? (@memoize Dict memoize_fitness(w) = fitness(w)) : fitness

_icn_ga = GA(;
Expand All @@ -44,7 +46,7 @@ function _optimize!(
selection=tournament(2),
crossover=SPX,
mutation=flip,
mutationRate=1.0,
mutationRate=1.0
)

pop = generate_population(icn, pop_size)
Expand All @@ -63,11 +65,11 @@ function optimize!(
global_iter,
iter,
dom_size,
param,
metric,
pop_size;
sampler=nothing,
memoize=false,
parameters...
)
results = Dictionary{BitVector,Int}()
aux_results = Vector{BitVector}(undef, global_iter)
Expand All @@ -83,12 +85,12 @@ function optimize!(
solutions,
non_sltns,
dom_size,
param,
eval(metric),
pop_size,
iter;
samples,
memoize,
parameters...
)
aux_results[i] = weigths(aux_icn)
end
Expand Down Expand Up @@ -117,7 +119,8 @@ function ICNGeneticOptimizer(;
end

function CompositionalNetworks.optimize!(
icn, solutions, non_sltns, dom_size, param, metric, optimizer::ICNGeneticOptimizer
icn, solutions, non_sltns, dom_size, metric, optimizer::ICNGeneticOptimizer;
parameters...
)
return optimize!(
icn,
Expand All @@ -126,11 +129,11 @@ function CompositionalNetworks.optimize!(
optimizer.global_iter,
optimizer.local_iter,
dom_size,
param,
metric,
optimizer.pop_size;
optimizer.sampler,
optimizer.memoize,
parameters...
)
end

Expand Down
2 changes: 1 addition & 1 deletion src/qubo/gradient.jl
Original file line number Diff line number Diff line change
Expand Up @@ -90,7 +90,7 @@ function train!(Q, X, penalty, η, precision, X_test, oversampling, binarization
Q[:,:] = round.(precision*Q)

df = make_df(X_test, Q, penalty, binarization, domains)
return pretty_table(describe(df[!, [:penalty, :predict, :shifted, :accurate]]))
return pretty_table(DataFrames.describe(df[!, [:penalty, :predict, :shifted, :accurate]]))
end

function train(
Expand Down
3 changes: 3 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ using Test
domains = [domain([1,2,3,4]) for i in 1:4]
compo = icn(domains, allunique)
@test compo([1,2,3,3], dom_size = 4) > 0.0

compo = icn(domains, allunique; optimizer = ICNLocalSearchOptimizer())
# @test compo([1,2,3,3], dom_size = 4) > 0.0
end

@testset "QUBOConstraints.jl" begin
Expand Down

0 comments on commit 06ddd31

Please sign in to comment.