Add a Metaheuristics.jl extension #90

Open · wants to merge 6 commits into base: dev
5 changes: 5 additions & 0 deletions Project.toml
@@ -18,14 +18,17 @@ Unrolled = "9602ed7d-8fef-5bc8-8597-8f21381861e8"
[weakdeps]
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
LocalSearchSolvers = "2b10edaa-728d-4283-ac71-07e312d6ccf3"
Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898"

[extensions]
GeneticExt = "Evolutionary"
LocalSearchSolversExt = "LocalSearchSolvers"
MetaheuristicsExt = "Metaheuristics"

[compat]
LocalSearchSolvers = "0.4"
Evolutionary = "0.11"
Metaheuristics = "3"
ConstraintCommons = "0.3"
ConstraintDomains = "0.4"
Dictionaries = "0.4"
@@ -42,6 +45,7 @@ julia = "1.10"
Aqua = "4c88cf16-eb10-579e-8560-4a9242c79595"
Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6"
LocalSearchSolvers = "2b10edaa-728d-4283-ac71-07e312d6ccf3"
Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898"
ExplicitImports = "7d51a73a-1435-4ff3-83d9-f097790105c7"
JET = "c3a54625-cd67-489e-a8e7-0a5a0ff4e31b"
Memoization = "6fafb56a-5788-4b4e-91ca-c0cea6611c73"
@@ -57,6 +61,7 @@ test = [
"Evolutionary",
"LocalSearchSolvers",
"Memoization",
"Metaheuristics",
"Test",
"TestItemRunner",
"ThreadPools",
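Note: the [weakdeps]/[extensions] pairing above uses Julia's package-extension mechanism, so MetaheuristicsExt below is only compiled and loaded once Metaheuristics itself is loaded. A minimal sketch to confirm the extension is active (Julia ≥ 1.9; assumes both packages are installed):

import CompositionalNetworks
import Metaheuristics  # loading the weak dependency triggers the extension
@assert !isnothing(Base.get_extension(CompositionalNetworks, :MetaheuristicsExt))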
145 changes: 145 additions & 0 deletions ext/MetaheuristicsExt/MetaheuristicsExt.jl
@@ -0,0 +1,145 @@
module MetaheuristicsExt

import CompositionalNetworks:
    CompositionalNetworks, AbstractICN, Configurations, manhattan, hamming
import CompositionalNetworks: MetaheuristicsOptimizer, apply!, weights_bias, regularization
import CompositionalNetworks: evaluate, solutions
import Metaheuristics: Metaheuristics, minimizer, GA, Algorithm, BitArraySpace

# Build an initial population of `pop_size` individuals: all-false bit vectors by
# default, or copies of a seed vector `vect` (copied so individuals don't alias).
function generate_population(icn, pop_size; vect = [])
    population = Vector{BitVector}()
    if isempty(vect)
        foreach(_ -> push!(population, falses(length(icn.weights))), 1:pop_size)
    else
        foreach(_ -> push!(population, copy(vect)), 1:pop_size)
    end
    return population
end

function CompositionalNetworks.MetaheuristicsOptimizer(backend;
        maxiters = 400,
        maxtime = 500,
        extra_functions = Dict(),
        bounds = nothing
)
    if backend isa Metaheuristics.Algorithm{<:GA}
        extra_functions[:generate_population] = generate_population
    end

    return MetaheuristicsOptimizer(maxiters, maxtime, backend, bounds, extra_functions)
end

function CompositionalNetworks.optimize!(
        icn::T,
        configurations::Configurations,
        # dom_size,
        metric_function::Union{Function, Vector{Function}},
        optimizer_config::MetaheuristicsOptimizer;
        samples = nothing,
        memoize = false,
        parameters...
) where {T <: AbstractICN}

    # @info icn.weights

    # inplace = zeros(dom_size, 18)
    solution_iter = solutions(configurations)
    non_solutions = solutions(configurations; non_solutions = true)
    solution_vector = [i.x for i in solution_iter]

    function fitness(w)
        weights_validity = apply!(icn, w)

        a = if metric_function isa Function
            metric_function(
                icn,
                configurations,
                solution_vector;
                weights_validity = weights_validity,
                parameters...
            )
        else
            minimum(
                met -> met(
                    icn,
                    configurations,
                    solution_vector;
                    weights_validity = weights_validity,
                    parameters...
                ),
                metric_function
            )
        end

        b = weights_bias(w)
        c = regularization(icn)

        function new_regularization(icn::AbstractICN)
            start = 1
            count = 0
            total = 0
            for (i, layer) in enumerate(icn.layers)
                if !layer.mutex
                    ran = start:(start + icn.weightlen[i] - 1)
                    op = findall(icn.weights[ran])
                    max_op = ran .- (start - 1)
                    total += (sum(op) / sum(max_op))
                    count += 1
                end
                start += icn.weightlen[i]
            end
            return total / count
        end

        # Experimental diagnostics; computed but not yet part of the returned fitness.
        d = sum(findall(icn.weights)) /
            (length(icn.weights) * (length(icn.weights) + 1) / 2)

        e = new_regularization(icn)

        # @info "Lot of things" a b c d e
        #=
        println("""
        sum: $a
        weights bias: $b
        regularization: $c
        new reg: $e
        thread: $(Threads.threadid())
        """) =#

        return a + b + c
    end

    #=
    _icn_ga = GA(;
        populationSize = optimizer_config.pop_size,
        crossoverRate = 0.8,
        epsilon = 0.05,
        selection = tournament(4),
        crossover = SPX,
        mutation = flip,
        mutationRate = 1.0
    )
    =#

    # pop = generate_population(icn, optimizer_config.pop_size)

    # `nothing` bounds default to a BitArraySpace for GA backends; other
    # backends must supply explicit bounds at construction time.
    bounds = if isnothing(optimizer_config.bounds)
        if optimizer_config.backend isa Metaheuristics.Algorithm{<:GA}
            BitArraySpace(length(icn.weights))
        else
            error("MetaheuristicsOptimizer: explicit `bounds` are required for non-GA backends")
        end
    else
        optimizer_config.bounds
    end

    r = Metaheuristics.optimize(
        fitness,
        bounds,
        optimizer_config.backend
    )
    validity = apply!(icn, Metaheuristics.minimizer(r))
    return icn => validity
end

end
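For reference, a minimal usage sketch of the constructor defined above (GA is exported by Metaheuristics.jl; with `bounds` left as `nothing`, optimize! falls back to BitArraySpace(length(icn.weights)) for GA backends):

using CompositionalNetworks, Metaheuristics

opt = MetaheuristicsOptimizer(GA(); maxiters = 200, maxtime = 60)
# opt.extra_functions now carries :generate_population for GA backends;
# a non-GA backend would additionally need an explicit `bounds` argument.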
3 changes: 2 additions & 1 deletion src/CompositionalNetworks.jl
@@ -14,7 +14,8 @@ import Unrolled: @unroll

# SECTION - Exports
export hamming, minkowski, manhattan, weights_bias
export AbstractOptimizer, GeneticOptimizer, LocalSearchOptimizer, optimize!
export AbstractOptimizer, GeneticOptimizer, LocalSearchOptimizer, optimize!,
MetaheuristicsOptimizer
export generate_configurations, explore_learn
export AbstractLayer,
Transformation, Aggregation, LayerCore, Arithmetic, Comparison, SimpleFilter,
4 changes: 4 additions & 0 deletions src/configuration.jl
@@ -8,6 +8,10 @@ struct NonSolution <: AbstractSolution
    x::Any
end

struct UnknownSolution <: AbstractSolution
    x::Any
end

const Configuration{T} = T where {T <: AbstractSolution} # alias

const Configurations{N} = Set{<:Configuration}
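UnknownSolution complements Solution and NonSolution for configurations whose status has not been classified yet; the new evaluate method in src/icn.jl (below) wraps raw vectors in it so candidates coming straight from a metaheuristic reuse the existing Configuration dispatch. A hypothetical illustration (assumes an `icn` in scope):

raw = [1, 2, 3, 4]      # a candidate vector, e.g. produced by the solver
evaluate(icn, raw)      # equivalent to evaluate(icn, UnknownSolution(raw))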
16 changes: 13 additions & 3 deletions src/icn.jl
@@ -67,7 +67,7 @@ function generate_new_valid_weights!(icn::T) where {T <: AbstractICN}
    nothing
end

function apply!(icn::AbstractICN, weights::BitVector)::Bool
function apply!(icn::AbstractICN, weights::AbstractVector{Bool})::Bool
    icn.weights .= weights
    return check_weights_validity(icn, weights)
end
@@ -106,6 +106,16 @@ function evaluate(
    end
end

function evaluate(
        icn::AbstractICN,
        config::AbstractVector;
        weights_validity = true,
        parameters...
)
    evaluate(
        icn, UnknownSolution(config); weights_validity = weights_validity, parameters...)
end

function evaluate(
        icns::Vector{<:AbstractICN},
        config::Configuration;
@@ -168,7 +178,7 @@ function evaluate(icn::Nothing, config::Configuration)
end
=#

(icn::AbstractICN)(weights::BitVector) = apply!(icn, weights)
(icn::AbstractICN)(weights::AbstractVector{Bool}) = apply!(icn, weights)
(icn::AbstractICN)(config::Configuration) = evaluate(icn, config)

struct ICN{S} <: AbstractICN where {S <: Union{AbstractVector{<:AbstractLayer}, Nothing}}
@@ -179,7 +189,7 @@ struct ICN{S} <: AbstractICN where {S <: Union{AbstractVector{<:AbstractLayer},
    weightlen::AbstractVector{Int}
    constants::Dict
    function ICN(;
            weights = BitVector[],
            weights = AbstractVector{Bool}[],
            parameters = Symbol[],
            layers = [Transformation, Arithmetic, Aggregation, Comparison],
            connection = UInt32[1, 2, 3, 4],
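Widening apply! and the callable form from BitVector to AbstractVector{Bool} is what lets Metaheuristics drive the ICN directly: a minimizer over a BitArraySpace may hand back a plain Vector{Bool} rather than a BitVector, and both now dispatch to the same method. A small sketch (assuming an `icn` instance of matching weight length):

w_bit  = BitVector([true, false, true])
w_bool = [true, false, true]   # Vector{Bool}
icn(w_bit)    # apply!(icn, w_bit)
icn(w_bool)   # apply!(icn, w_bool) — accepted after this change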
2 changes: 1 addition & 1 deletion src/layer.jl
@@ -7,7 +7,7 @@ struct LayerCore <: AbstractLayer
    mutex::Bool
    argtype::Pair
    fnexprs::NamedTuple{
        names, T} where {names, T <: Tuple{Vararg{<:Union{Symbol, JLFunction}}}}
        names, T} where {names, T <: Tuple{Vararg{Union{Symbol, JLFunction}}}}
    fn::NamedTuple{names, T} where {names, T <: Tuple{Vararg{Function}}}
    function LayerCore(name::Symbol, mutex::Bool, Q::Pair, fnexprs)
        fnexprs = map(x -> JLFunction(x), fnexprs)
4 changes: 1 addition & 3 deletions src/layers/aggregation.jl
@@ -5,9 +5,7 @@ const Aggregation = LayerCore(
    (
        sum = :((x) -> sum(x)),
        count_positive = :((x) -> count(i -> i > 0, x)),
        count_op_val = :((x; val, op) -> count(i -> op(i, val), x)),
        maximum = :((x) -> isempty(x) ? typemax(eltype(x)) : maximum(x)),
        minimum = :((x) -> isempty(x) ? typemax(eltype(x)) : minimum(x))
        count_op_val = :((x; val, op) -> count(i -> op(i, val), x))
        # maximum = :((x) -> isempty(x) ? typemax(eltype(x)) : maximum(x)),
        # minimum = :((x) -> isempty(x) ? typemax(eltype(x)) : minimum(x))
    )
)

3 changes: 2 additions & 1 deletion src/learn_and_explore.jl
@@ -48,7 +48,8 @@ function explore_learn(
        configurations = generate_configurations(concept, domains; parameters...)
    end

    icn.constants[:dom_size] = maximum(length, domains)
    icn.constants[:dom_size] = maximum(
        x -> maximum(x.domain) - minimum(x.domain) + 1, domains)
    icn.constants[:numvars] = length(domains)

    return optimize!(
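Note the semantic shift in :dom_size here: the old value was the largest domain's cardinality, the new one is the span of its value range (the two coincide for dense ranges like 1:4). On a plain vector of values:

vals = [2, 5, 9]
length(vals)                        # 3 — old dom_size (element count)
maximum(vals) - minimum(vals) + 1   # 8 — new dom_size (range span)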
59 changes: 59 additions & 0 deletions src/optimizer.jl
@@ -77,6 +77,65 @@ end
    )[2]
end

# SECTION - Metaheuristics Extension
struct MetaheuristicsOptimizer <: AbstractOptimizer
    maxiters::Int64
    maxtime::Float64
    backend::Any
    bounds::Any
    extra_functions::Dict{Symbol, Function}
end

@testitem "Metaheuristics" tags=[:extension] default_imports=false begin
import CompositionalNetworks:
Transformation, Arithmetic, Aggregation, Comparison, ICN,
SimpleFilter
import CompositionalNetworks: MetaheuristicsOptimizer, explore_learn
import ConstraintDomains: domain
import Metaheuristics: GA
import Test: @test

test_icn = ICN(;
parameters = [:dom_size, :numvars, :val],
layers = [SimpleFilter, Transformation, Arithmetic, Aggregation, Comparison],
connection = [1, 2, 3, 4]
)

function allunique_val(x; val)
for i in 1:(length(x) - 1)
for j in (i + 1):length(x)
if x[i] == x[j]
if x[i] != val
return false
end
end
end
end
return true
end

function allunique_vals(x; vals)
for i in 1:(length(x) - 1)
for j in (i + 1):length(x)
if x[i] == x[j]
if !(x[i] in vals)
return false
end
end
end
end
return true
end

@test explore_learn(
[domain([1, 2, 3, 4]) for i in 1:4],
allunique_val,
MetaheuristicsOptimizer(GA()), # use whatever optimizer here as provided by Metaheuristics.jl, I'm using GA
icn = test_icn,
val = 3
)[2]
end

# SECTION - CBLSOptimizer Extension
struct LocalSearchOptimizer <: AbstractOptimizer
    options::Any
6 changes: 3 additions & 3 deletions test/runtests.jl
@@ -7,8 +7,8 @@ using Test
using TestItemRunner

@testset "Package tests: ConstraintCommons" begin
    # include("Aqua.jl")
    # include("ExplicitImports.jl")
    # include("JET.jl")
    include("Aqua.jl")
    include("ExplicitImports.jl")
    include("JET.jl")
    include("TestItemRunner.jl")
end