Commit 7b66048

Add simple InitialStatic alpha choice by providing just a number to the alphaguess keyword in method constructors (#812)

* Add simple InitialStatic alpha choice by providing just a number to the alphaguess keyword in method constructors.

* Add Optim qualifier.
pkofod authored Apr 30, 2020
1 parent 990c1b3 commit 7b66048
Showing 10 changed files with 25 additions and 9 deletions.
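In short: every solver constructor that takes an `alphaguess` keyword now also accepts a bare number, which the new `_alphaguess` helper wraps in `LineSearches.InitialStatic`. A minimal sketch of the resulting usage (the objective function and starting point are illustrative placeholders, not part of this commit):

    using Optim

    # Illustrative objective: the 2-D Rosenbrock function.
    rosenbrock(x) = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2

    # Before this commit, a fixed initial step length needed an explicit object:
    optimize(rosenbrock, zeros(2), BFGS(alphaguess = Optim.LineSearches.InitialStatic(alpha = 0.5)))

    # After this commit, a bare number does the same thing:
    optimize(rosenbrock, zeros(2), BFGS(alphaguess = 0.5))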
2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/accelerated_gradient_descent.jl

@@ -18,7 +18,7 @@ function AcceleratedGradientDescent(;
                 alphaguess = LineSearches.InitialPrevious(), # TODO: investigate good defaults
                 linesearch = LineSearches.HagerZhang(), # TODO: investigate good defaults
                 manifold::Manifold=Flat())
-    AcceleratedGradientDescent(alphaguess, linesearch, manifold)
+    AcceleratedGradientDescent(_alphaguess(alphaguess), linesearch, manifold)
 end

 mutable struct AcceleratedGradientDescentState{T, Tx} <: AbstractOptimizerState
2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/bfgs.jl

@@ -42,7 +42,7 @@ function BFGS(; alphaguess = LineSearches.InitialStatic(), # TODO: benchmark def
               initial_invH = nothing,
               initial_stepnorm = nothing,
               manifold::Manifold=Flat())
-    BFGS(alphaguess, linesearch, initial_invH, initial_stepnorm, manifold)
+    BFGS(_alphaguess(alphaguess), linesearch, initial_invH, initial_stepnorm, manifold)
 end

 mutable struct BFGSState{Tx, Tm, T,G} <: AbstractOptimizerState
2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/cg.jl

@@ -88,7 +88,7 @@ function ConjugateGradient(; alphaguess = LineSearches.InitialHagerZhang(),

     ConjugateGradient(eta,
                       P, precondprep,
-                      alphaguess, linesearch,
+                      _alphaguess(alphaguess), linesearch,
                       manifold)
 end

2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/gradient_descent.jl

@@ -33,7 +33,7 @@ function GradientDescent(; alphaguess = LineSearches.InitialPrevious(), # TODO:
                          P = nothing,
                          precondprep = (P, x) -> nothing,
                          manifold::Manifold=Flat())
-    GradientDescent(alphaguess, linesearch, P, precondprep, manifold)
+    GradientDescent(_alphaguess(alphaguess), linesearch, P, precondprep, manifold)
 end

 mutable struct GradientDescentState{Tx, T} <: AbstractOptimizerState
2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/l_bfgs.jl

@@ -126,7 +126,7 @@ function LBFGS(; m::Integer = 10,
                precondprep = (P, x) -> nothing,
                manifold::Manifold=Flat(),
                scaleinvH0::Bool = true && (typeof(P) <: Nothing) )
-    LBFGS(Int(m), alphaguess, linesearch, P, precondprep, manifold, scaleinvH0)
+    LBFGS(Int(m), _alphaguess(alphaguess), linesearch, P, precondprep, manifold, scaleinvH0)
 end

 Base.summary(::LBFGS) = "L-BFGS"
2 changes: 1 addition & 1 deletion src/multivariate/solvers/first_order/momentum_gradient_descent.jl

@@ -14,7 +14,7 @@ function MomentumGradientDescent(; mu::Real = 0.01,
                  alphaguess = LineSearches.InitialPrevious(), # TODO: investigate good defaults
                  linesearch = LineSearches.HagerZhang(), # TODO: investigate good defaults
                  manifold::Manifold=Flat())
-    MomentumGradientDescent(mu, alphaguess, linesearch, manifold)
+    MomentumGradientDescent(mu, _alphaguess(alphaguess), linesearch, manifold)
 end

 mutable struct MomentumGradientDescentState{Tx, T} <: AbstractOptimizerState
4 changes: 2 additions & 2 deletions src/multivariate/solvers/first_order/ngmres.jl

@@ -75,7 +75,7 @@ function NGMRES(;manifold::Manifold = Flat(),
                 ϵ0 = 1e-12, # ϵ0 = 1e-12 -- number was an arbitrary choice#
                 wmax::Int = 10) # wmax = 10 -- number was an arbitrary choice to match L-BFGS field `m`
     @assert manifold == nlprecon.manifold
-    NGMRES(alphaguess, linesearch, manifold, nlprecon, nlpreconopts, ϵ0, wmax)
+    NGMRES(_alphaguess(alphaguess), linesearch, manifold, nlprecon, nlpreconopts, ϵ0, wmax)
 end

 """
@@ -116,7 +116,7 @@ function OACCEL(;manifold::Manifold = Flat(),
                 ϵ0 = 1e-12, # ϵ0 = 1e-12 -- number was an arbitrary choice
                 wmax::Int = 10) # wmax = 10 -- number was an arbitrary choice to match L-BFGS field `m`
     @assert manifold == nlprecon.manifold
-    OACCEL(alphaguess, linesearch, manifold, nlprecon, nlpreconopts, ϵ0, wmax)
+    OACCEL(_alphaguess(alphaguess), linesearch, manifold, nlprecon, nlpreconopts, ϵ0, wmax)
 end


2 changes: 1 addition & 1 deletion src/multivariate/solvers/second_order/newton.jl

@@ -22,7 +22,7 @@ Wright (ch. 6, 1999) for a discussion of Newton's method in practice.
 """
 function Newton(; alphaguess = LineSearches.InitialStatic(), # Good default for Newton
                 linesearch = LineSearches.HagerZhang()) # Good default for Newton
-    Newton(alphaguess, linesearch)
+    Newton(_alphaguess(alphaguess), linesearch)
 end

 Base.summary(::Newton) = "Newton's Method"
3 changes: 3 additions & 0 deletions src/utilities/perform_linesearch.jl

@@ -2,6 +2,9 @@
 # return true if the direction was changed
 reset_search_direction!(state, d, method) = false # no-op

+_alphaguess(a) = a
+_alphaguess(a::Number) = LineSearches.InitialStatic(alpha=a)
+
 function reset_search_direction!(state, d, method::BFGS)
     n = length(state.x)
     T = eltype(state.x)
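The two one-line methods added above are the whole mechanism: the generic fallback passes existing initial-step-length objects through untouched, while the `Number` method wraps a scalar in `InitialStatic`. A quick sanity-check sketch written against the definitions in this hunk (`_alphaguess` is internal, hence the `Optim.` qualifier):

    using Optim

    # Non-numbers hit the generic fallback and pass through unchanged.
    ls = Optim.LineSearches.InitialHagerZhang()
    @assert Optim._alphaguess(ls) === ls

    # Numbers hit the Number method and are wrapped in InitialStatic.
    guess = Optim._alphaguess(0.5)
    @assert guess isa Optim.LineSearches.InitialStatic
    @assert guess.alpha == 0.5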
13 changes: 13 additions & 0 deletions test/multivariate/optimize/interface.jl

@@ -36,6 +36,19 @@
             @test norm(Optim.minimum(ref)-Optim.minimum(r)) < 1e-6
         end
     end
+    # simple tests for https://github.com/JuliaNLSolvers/Optim.jl/issues/805
+    @test AcceleratedGradientDescent(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    @test BFGS(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    @test ConjugateGradient(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    @test GradientDescent(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    @test MomentumGradientDescent(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    @test Newton(alphaguess=1.0).alphaguess! isa Optim.LineSearches.InitialStatic
+    optimize(od, problem.initial_x, AcceleratedGradientDescent(alphaguess=1.0))
+    optimize(od, problem.initial_x, BFGS(alphaguess=1.0))
+    optimize(od, problem.initial_x, ConjugateGradient(alphaguess=1.0))
+    optimize(od, problem.initial_x, GradientDescent(alphaguess=1.0))
+    optimize(od, problem.initial_x, MomentumGradientDescent(alphaguess=1.0))
+    optimize(td, problem.initial_x, Newton(alphaguess=1.0))
 end

 @testset "only_fg!, only_fgh!" begin
