Represent OptimizationTrace as a Vector (#257)
ahwillia authored and pkofod committed Aug 12, 2016
1 parent 013d264 commit f255ff4
Showing 19 changed files with 38 additions and 53 deletions.
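
The heart of the change is in src/types.jl: OptimizationTrace stops being a wrapper type around a vector of states and becomes a type alias for that vector, so traces pick up the whole Vector API from Base instead of hand-written forwarding methods. A self-contained sketch of the before and after (Julia 0.4-era syntax, matching the diff; Optimizer and OptimizationState below are minimal stand-ins whose field names are read off this diff, not the full package definitions):

    abstract Optimizer
    immutable NelderMead <: Optimizer end

    immutable OptimizationState{T <: Optimizer}
        iteration::Int
        value::Float64
        g_norm::Float64
        metadata::Dict
    end

    # Before: a wrapper type, which forced Optim to forward push!, getindex,
    # length, setindex!, endof, append! to the inner states field.
    #
    #     immutable OptimizationTrace{T<:Optimizer}
    #         states::Vector{OptimizationState{T}}
    #     end
    #     OptimizationTrace{T}(m::T) = OptimizationTrace(Array{OptimizationState{T}}(0))

    # After: a plain alias, so every Vector method applies directly.
    typealias OptimizationTrace{T} Vector{OptimizationState{T}}

    mo = NelderMead()
    tr = OptimizationTrace{typeof(mo)}()   # empty Vector{OptimizationState{NelderMead}}
    push!(tr, OptimizationState{NelderMead}(1, 1.0, 1.0, Dict()))
    length(tr)                             # 1, via Base.length; no forwarding method

This is also why every solver's tr = OptimizationTrace(mo) call site below becomes tr = OptimizationTrace{typeof(mo)}(): an alias has no convenience constructor taking the optimizer value, only the parametric vector constructor.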
2 changes: 1 addition & 1 deletion src/accelerated_gradient_descent.jl
@@ -80,7 +80,7 @@ function optimize{T}(d::DifferentiableFunction,
     lsr = LineSearchResults(T)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @agdtrace

22 changes: 11 additions & 11 deletions src/api.jl
@@ -3,35 +3,35 @@ minimizer(r::OptimizationResults) = r.minimum
 minimum(r::OptimizationResults) = r.f_minimum
 iterations(r::OptimizationResults) = r.iterations
 iteration_limit_reached(r::OptimizationResults) = r.iteration_converged
-trace(r::OptimizationResults) = length(r.trace.states) > 0 ? r.trace : error("No trace in optimization results. To get a trace, run optimize() with store_trace = true.")
+trace(r::OptimizationResults) = length(r.trace) > 0 ? r.trace : error("No trace in optimization results. To get a trace, run optimize() with store_trace = true.")

 function x_trace(r::UnivariateOptimizationResults)
     tr = trace(r)
-    !haskey(tr.states[1].metadata, "x_minimum") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
-    [state.metadata["x_minimum"] for state in tr.states]
+    !haskey(tr[1].metadata, "x_minimum") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
+    [ state.metadata["x_minimum"] for state in tr ]
 end
 function x_lower_trace(r::UnivariateOptimizationResults)
     tr = trace(r)
-    !haskey(tr.states[1].metadata, "x_lower") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
-    [state.metadata["x_lower"] for state in tr.states]
+    !haskey(tr[1].metadata, "x_lower") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
+    [ state.metadata["x_lower"] for state in tr ]
 end
 x_lower_trace(r::MultivariateOptimizationResults) = error("x_lower_trace is not implemented for $(method(r)).")
 function x_upper_trace(r::UnivariateOptimizationResults)
     tr = trace(r)
-    !haskey(tr.states[1].metadata, "x_upper") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
-    [state.metadata["x_upper"] for state in tr.states]
+    !haskey(tr[1].metadata, "x_upper") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
+    [ state.metadata["x_upper"] for state in tr ]
 end
 x_upper_trace(r::MultivariateOptimizationResults) = error("x_upper_trace is not implemented for $(method(r)).")

 function x_trace(r::MultivariateOptimizationResults)
     tr = trace(r)
-    !haskey(tr.states[1].metadata, "x") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
-    [state.metadata["x"] for state in tr.states]
+    !haskey(tr[1].metadata, "x") && error("Trace does not contain x. To get a trace of x, run optimize() with extended_trace = true")
+    [ state.metadata["x"] for state in tr ]
 end

-f_trace(r::OptimizationResults) = [state.value for state in trace(r).states]
+f_trace(r::OptimizationResults) = [ state.value for state in trace(r) ]
 g_norm_trace(r::OptimizationResults) = error("g_norm_trace is not implemented for $(method(r)).")
-g_norm_trace(r::MultivariateOptimizationResults) = [state.g_norm for state in trace(r).states]
+g_norm_trace(r::MultivariateOptimizationResults) = [ state.g_norm for state in trace(r) ]

 f_calls(r::OptimizationResults) = r.f_calls

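For orientation, a hedged usage sketch of these accessors (not part of the diff; the Rosenbrock objective and the GradientDescent choice are illustrative, though the store_trace/extended_trace options and the method keyword match the calls in the tests below):

    using Optim

    rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2
    res = optimize(rosenbrock, [0.0, 0.0], method = GradientDescent(),
                   store_trace = true, extended_trace = true)

    Optim.f_trace(res)       # objective value at each traced iteration
    Optim.g_norm_trace(res)  # gradient norms (multivariate results only)
    Optim.x_trace(res)       # iterates; requires extended_trace = true
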
2 changes: 1 addition & 1 deletion src/bfgs.jl
@@ -88,7 +88,7 @@ function optimize{T}(d::DifferentiableFunction,
     I = eye(size(invH)...)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @bfgstrace

2 changes: 1 addition & 1 deletion src/brent.jl
@@ -61,7 +61,7 @@ function optimize{T <: AbstractFloat}(
     converged = false

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = store_trace || show_trace || extended_trace || callback != nothing
     @brenttrace

2 changes: 1 addition & 1 deletion src/cg.jl
@@ -145,7 +145,7 @@ function optimize{T}(df::DifferentiableFunction,
     lsr = LineSearchResults(T)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @cgtrace

2 changes: 1 addition & 1 deletion src/golden_section.jl
@@ -51,7 +51,7 @@ function optimize{T <: AbstractFloat}(f::Function, x_lower::T, x_upper::T,
     converged = false

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = store_trace || show_trace || extended_trace || callback != nothing
     @goldensectiontrace

2 changes: 1 addition & 1 deletion src/gradient_descent.jl
@@ -75,7 +75,7 @@ function optimize{T}(d::DifferentiableFunction,
     lsr = LineSearchResults(T)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @gdtrace

2 changes: 1 addition & 1 deletion src/l_bfgs.jl
@@ -148,7 +148,7 @@ function optimize{T}(d::DifferentiableFunction,
     twoloop_q, twoloop_alpha = Array(T, n), Array(T, mo.m)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @lbfgstrace

2 changes: 1 addition & 1 deletion src/levenberg_marquardt.jl
@@ -60,7 +60,7 @@ function levenberg_marquardt{T}(f::Function, g::Function, initial_x::AbstractVec
     m_buffer = Vector{T}(m)

     # Maintain a trace of the system.
-    tr = OptimizationTrace(LevenbergMarquardt())
+    tr = OptimizationTrace{typeof(LevenbergMarquardt())}()
     if show_trace
         d = Dict("lambda" => lambda)
         os = OptimizationState(iterCt, sumabs2(fcur), NaN, d)
2 changes: 1 addition & 1 deletion src/momentum_gradient_descent.jl
@@ -73,7 +73,7 @@ function optimize{T}(d::DifferentiableFunction,
     lsr = LineSearchResults(T)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @mgdtrace

2 changes: 1 addition & 1 deletion src/nelder_mead.jl
@@ -151,7 +151,7 @@ function optimize{T}(f::Function,
     # Maintain a trace
     f_x_previous, f_x = NaN, nmobjective(f_simplex, m, n)
     f_lowest = f_simplex[i_order[1]]
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.show_trace || o.store_trace || o.extended_trace || o.callback != nothing
     @nmtrace

2 changes: 1 addition & 1 deletion src/newton.jl
@@ -82,7 +82,7 @@ function optimize{T}(d::TwiceDifferentiableFunction,
     lsr = LineSearchResults(T)

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @newtontrace

2 changes: 1 addition & 1 deletion src/newton_trust_region.jl
@@ -284,7 +284,7 @@ function optimize{T}(d::TwiceDifferentiableFunction,
     lambda = NaN

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @newton_tr_trace

2 changes: 1 addition & 1 deletion src/particle_swarm.jl
@@ -123,7 +123,7 @@ function optimize{T}(cost_function::Function,
         X[j, 1] = initial_x[j]
         X_best[j, 1] = initial_x[j]
     end
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()

     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @swarmtrace
2 changes: 1 addition & 1 deletion src/simulated_annealing.jl
@@ -70,7 +70,7 @@ function optimize{T}(cost::Function,
     best_f_x = f_x

     # Trace the history of states visited
-    tr = OptimizationTrace(mo)
+    tr = OptimizationTrace{typeof(mo)}()
     tracing = o.store_trace || o.show_trace || o.extended_trace || o.callback != nothing
     @satrace

31 changes: 8 additions & 23 deletions src/types.jl
@@ -46,15 +46,11 @@ immutable OptimizationState{T <: Optimizer}
     metadata::Dict
 end

-immutable OptimizationTrace{T<:Optimizer}
-    states::Vector{OptimizationState{T}}
-end
-
-OptimizationTrace{T}(m::T) = OptimizationTrace(Array{OptimizationState{T}}(0))
+typealias OptimizationTrace{T} Vector{OptimizationState{T}}

 abstract OptimizationResults

-type MultivariateOptimizationResults{T,N} <: OptimizationResults
+type MultivariateOptimizationResults{T,N,M} <: OptimizationResults
     method::String
     initial_x::Array{T,N}
     minimum::Array{T,N}
@@ -67,12 +63,12 @@ type MultivariateOptimizationResults{T,N} <: OptimizationResults
     f_tol::Float64
     g_converged::Bool
     g_tol::Float64
-    trace::OptimizationTrace
+    trace::OptimizationTrace{M}
     f_calls::Int
     g_calls::Int
 end

-type UnivariateOptimizationResults{T} <: OptimizationResults
+type UnivariateOptimizationResults{T,M} <: OptimizationResults
     method::String
     initial_lower::T
     initial_upper::T
@@ -83,7 +79,7 @@ type UnivariateOptimizationResults{T} <: OptimizationResults
     converged::Bool
     rel_tol::Float64
     abs_tol::Float64
-    trace::OptimizationTrace
+    trace::OptimizationTrace{M}
     f_calls::Int
 end

@@ -110,21 +106,10 @@ function Base.show(io::IO, t::OptimizationState)
     return
 end

-Base.push!(t::OptimizationTrace, s::OptimizationState) = push!(t.states, s)
-Base.getindex(t::OptimizationTrace, i::Integer) = getindex(t.states, i)
-Base.endof(t::OptimizationTrace) = endof(t.states)
-Base.length(t::OptimizationTrace) = length(t.states)
-
-function Base.setindex!(t::OptimizationTrace,
-                        s::OptimizationState,
-                        i::Integer)
-    setindex!(t.states, s, i)
-end
-
-function Base.show(io::IO, t::OptimizationTrace)
+function Base.show(io::IO, tr::OptimizationTrace)
     @printf io "Iter     Function value   Gradient norm \n"
     @printf io "------   --------------   --------------\n"
-    for state in t.states
+    for state in tr
         show(io, state)
     end
     return
@@ -181,7 +166,7 @@ function Base.append!(a::MultivariateOptimizationResults, b::MultivariateOptimizationResults)
     a.x_converged = x_converged(b)
     a.f_converged = f_converged(b)
     a.g_converged = g_converged(b)
-    append!(a.trace.states, b.trace.states)
+    append!(a.trace, b.trace)
     a.f_calls += f_calls(b)
     a.g_calls += g_calls(b)
 end
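
With the alias in place, everything the deleted forwarding methods provided now comes from Base for free. A quick sanity sketch, reusing the stand-in Optimizer/OptimizationState/OptimizationTrace definitions from the sketch at the top of this page:

    s1 = OptimizationState{NelderMead}(1, 10.0, 5.0, Dict())
    s2 = OptimizationState{NelderMead}(2, 8.0, 3.0, Dict())

    tr = OptimizationTrace{NelderMead}()
    push!(tr, s1)                 # Base.push!
    tr[1] = s1                    # Base.setindex!
    tr[end].iteration == 1        # Base.endof + Base.getindex

    other = OptimizationTrace{NelderMead}()
    push!(other, s2)
    append!(tr, other)            # replaces append!(a.trace.states, b.trace.states)
    length(tr) == 2               # Base.length
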
6 changes: 3 additions & 3 deletions test/callbacks.jl
@@ -12,7 +12,7 @@ let
                    SimulatedAnnealing())
         ot_run = false
         cb = tr -> begin
-            @test tr.states[end].iteration % 3 == 0
+            @test tr[end].iteration % 3 == 0
             ot_run = true
         end
         optimize(f, initial_x, method = method, callback = cb, show_every=3, store_trace=true)
@@ -33,7 +33,7 @@ let
                    MomentumGradientDescent())
         ot_run = false
         cb = tr -> begin
-            @test tr.states[end].iteration % 3 == 0
+            @test tr[end].iteration % 3 == 0
             ot_run = true
         end
         optimize(d2, initial_x, method = method, callback = cb, show_every=3, store_trace=true)
@@ -51,7 +51,7 @@ let
     for method in (Newton(),)
         ot_run = false
         cb = tr -> begin
-            @test tr.states[end].iteration % 3 == 0
+            @test tr[end].iteration % 3 == 0
             ot_run = true
         end
         optimize(d3, initial_x, method = method, callback = cb, show_every=3, store_trace=true)
2 changes: 1 addition & 1 deletion test/newton_trust_region.jl
@@ -109,7 +109,7 @@ let
     d = TwiceDifferentiableFunction(f, g!, h!)

     results = Optim.optimize(d, [0.0], method=NewtonTrustRegion())
-    @assert length(results.trace.states) == 0
+    @assert length(results.trace) == 0
     @assert results.g_converged
     @assert norm(results.minimum - [5.0]) < 0.01

2 changes: 1 addition & 1 deletion test/types.jl
@@ -1,7 +1,7 @@
 let
     solver = NelderMead()
     T = typeof(solver)
-    trace = OptimizationTrace(solver)
+    trace = OptimizationTrace{T}()
     push!(trace,OptimizationState{T}(1,1.0,1.0,Dict()))
     push!(trace,OptimizationState{T}(2,1.0,1.0,Dict()))
     @test length(trace) == 2
