Skip to content

Commit

Permalink
add threading support
Browse files Browse the repository at this point in the history
  • Loading branch information
baggepinnen committed Mar 20, 2021
1 parent 551d120 commit d10b57d
Show file tree
Hide file tree
Showing 4 changed files with 36 additions and 5 deletions.
4 changes: 3 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "Hyperopt"
uuid = "93e5fe13-2215-51db-baaf-2e9a34fb2712"
authors = ["Fredrik Bagge Carlson <[email protected]>"]
version = "0.4.3"
version = "0.4.4"

[deps]
BayesianOptimization = "4c6ed407-134f-591c-93fa-e0f7c164a0ec"
Expand All @@ -15,6 +15,7 @@ Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
ThreadPools = "b189fb0b-2eb5-4ed4-bc0c-d34c51242431"

[compat]
BayesianOptimization = "0.2"
Expand All @@ -23,6 +24,7 @@ Juno = "0.7, 0.8"
LatinHypercubeSampling = "1.2"
MacroTools = "0.5"
RecipesBase = "0.7, 0.8, 1.0"
ThreadPools = "1"
julia = "1.5"

[extras]
Expand Down
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -206,4 +206,5 @@ end
```

# Parallel execution
The macro `@phyperopt` works in the same way as `@hyperopt` but distributes all computation on available workers. The usual caveats apply, code must be loaded on all workers etc.
- The macro `@phyperopt` works in the same way as `@hyperopt` but distributes all computation on available workers. The usual caveats apply, code must be loaded on all workers etc.
- The macro `@thyperopt` uses `ThreadPools.tmap` to evaluate the objective on all available threads. Beware of high memory consumption if your objective allocates a lot of memory.
20 changes: 17 additions & 3 deletions src/Hyperopt.jl
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
module Hyperopt

export Hyperoptimizer, @hyperopt, @phyperopt, printmin, printmax
export Hyperoptimizer, @hyperopt, @phyperopt, @thyperopt, printmin, printmax
export RandomSampler, BlueNoiseSampler, LHSampler, CLHSampler, Continuous, Categorical, GPSampler, Max, Min, Hyperband

using Base.Threads: threadid, nthreads
Expand All @@ -12,6 +12,7 @@ using RecipesBase
using Distributed
using LatinHypercubeSampling
using BayesianOptimization, GaussianProcesses
using ThreadPools

const HO_RNG = [MersenneTwister(rand(1:1000)) for _ in 1:nthreads()]

Expand Down Expand Up @@ -138,7 +139,7 @@ macro hyperopt(ex)
end
end

function pmacrobody(ex, params, ho_)
function pmacrobody(ex, params, ho_, pmap=pmap)
quote
function workaround_function()
ho = $(ho_)
Expand All @@ -147,7 +148,7 @@ function pmacrobody(ex, params, ho_)
# reassign the original array and then append the new history. If a new array is used, the change will not be visible in the original hyperoptimizer
hist = ho.history
ho.history = []
res = pmap(1:ho.iterations) do i
res = $(pmap)(1:ho.iterations) do i
$(Expr(:tuple, esc.(params)...)),_ = iterate(ho,i)
res = $(esc(ex.args[2])) # ex.args[2] = Body of the For loop

Expand All @@ -162,13 +163,26 @@ function pmacrobody(ex, params, ho_)
end
end

"""
Identical to `@hyperopt`, except that the objective is evaluated in parallel
across the available workers using `Distributed.pmap`.
"""
macro phyperopt(ex)
    parsed = preprocess_expression(ex)
    # The GP sampler keeps sequential state and cannot be driven in parallel.
    if parsed[3].args[1] === :GPSampler
        error("We currently do not support running the GPSampler in parallel. If this is an issue, open an issue ;)")
    end
    hyperoptimizer = create_ho(parsed...)
    # Default fourth argument of pmacrobody selects Distributed.pmap as the mapper.
    return pmacrobody(ex, parsed[1], hyperoptimizer)
end

"""
Identical to `@hyperopt`, except that the objective is evaluated on all
available threads using `ThreadPools.tmap`.
"""
macro thyperopt(ex)
    parsed = preprocess_expression(ex)
    # The GP sampler keeps sequential state and cannot be driven in parallel.
    if parsed[3].args[1] === :GPSampler
        error("We currently do not support running the GPSampler in parallel. If this is an issue, open an issue ;)")
    end
    hyperoptimizer = create_ho(parsed...)
    # Pass ThreadPools.tmap so pmacrobody maps over iterations with threads
    # instead of the default Distributed.pmap.
    return pmacrobody(ex, parsed[1], hyperoptimizer, ThreadPools.tmap)
end

function Base.minimum(ho::Hyperoptimizer)
m,i = findmin(replace(ho.results, NaN => Inf))
m
Expand Down
14 changes: 14 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,20 @@ f(a,b=true;c=10) = sum(@. 100 + (a-3)^2 + (b ? 10 : 20) + (c-100)^2) # This func
@info "Testing Parallel"

rmprocs(workers())


# Smoke test for the multithreaded @thyperopt macro: 300 iterations of random
# search over (a, b, c); the loop body is mapped over threads via ThreadPools.tmap.
horp = @thyperopt for i=300, sampler=RandomSampler(), a = LinRange(1,5,50), b = [true, false], c = exp10.(LinRange(-1,3,50))
# println(i, "\t", a, "\t", b, "\t", c)
f(a,b,c=c)
end
# Loose upper bound on the best objective value found — NOTE(review): assumes
# 300 random draws reliably find f < 300 on this grid; confirm it is not flaky.
@test minimum(horp) < 300
# All 300 iterations must be recorded even though evaluation was threaded.
@test length(horp.history) == 300
@test length(horp.results) == 300
# Re-evaluating f at each stored parameter tuple must reproduce the stored
# result, i.e. history and results were not mismatched by concurrent writes.
@test all(1:300) do i
f(horp.history[i][1:2]..., c=horp.history[i][3]) == horp.results[i]
end


horp = @phyperopt for i=300, sampler=RandomSampler(), a = LinRange(1,5,50), b = [true, false], c = exp10.(LinRange(-1,3,50))
# println(i, "\t", a, "\t", b, "\t", c)
f(a,b,c=c)
Expand Down

0 comments on commit d10b57d

Please sign in to comment.