Skip to content

Commit 69d7f9b

Browse files
committed
Update cost function and time formatting
1 parent 14124d3 commit 69d7f9b

File tree

4 files changed

+27
-21
lines changed

4 files changed

+27
-21
lines changed

src/OptimKit.jl

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -104,6 +104,12 @@ Also see [`GradientDescent`](@ref), [`ConjugateGradient`](@ref), [`LBFGS`](@ref)
104104
"""
105105
function optimize end
106106

107+
"""
    format_time(t::Float64)

Format a duration `t` (in seconds) as a short human-readable string:
seconds (`"x.xx s"`) below one minute, minutes (`"x.xx m"`) below one
hour, and hours (`"x.xx h"`) otherwise.
"""
function format_time(t::Float64)
    # BUGFIX: the hour threshold was `t < 2600`, a typo for 3600 (= 60 * 60),
    # which reported durations between ~43.3 min and 60 min in hours.
    return t < 60 ? @sprintf("%.2f s", t) :
           t < 3600 ? @sprintf("%.2f m", t / 60) :
           @sprintf("%.2f h", t / 3600)
end
112+
107113
include("linesearches.jl")
108114
include("gd.jl")
109115
include("cg.jl")

src/cg.jl

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ function optimize(fg, x, alg::ConjugateGradient;
9898

9999
numiter = 0
100100
verbosity >= 2 &&
101-
@info @sprintf("CG: initializing with f = %.12f, ‖∇f‖ = %.4e", f, normgrad)
101+
@info @sprintf("CG: initializing with f = %.12e, ‖∇f‖ = %.4e", f, normgrad)
102102
local xprev, gprev, Pgprev, ηprev
103103
while !(_hasconverged || _shouldstop)
104104
told = t
@@ -150,8 +150,8 @@ function optimize(fg, x, alg::ConjugateGradient;
150150
break
151151
end
152152
verbosity >= 3 &&
153-
@info @sprintf("CG: iter %4d, Δt %7.2f s: f = %.12f, ‖∇f‖ = %.4e, α = %.2e, β = %.2e, nfg = %d",
154-
numiter, Δt, f, normgrad, α, β, nfg)
153+
@info @sprintf("CG: iter %4d, Δt %s: f = %.12e, ‖∇f‖ = %.4e, α = %.2e, β = %.2e, nfg = %d",
154+
numiter, format_time(Δt), f, normgrad, α, β, nfg)
155155

156156
# transport gprev, ηprev and vectors in Hessian approximation to x
157157
gprev = transport!(gprev, xprev, ηprev, α, x)
@@ -167,12 +167,12 @@ function optimize(fg, x, alg::ConjugateGradient;
167167
end
168168
if _hasconverged
169169
verbosity >= 2 &&
170-
@info @sprintf("CG: converged after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
171-
numiter, t, f, normgrad)
170+
@info @sprintf("CG: converged after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
171+
numiter, format_time(t), f, normgrad)
172172
else
173173
verbosity >= 1 &&
174-
@warn @sprintf("CG: not converged to requested tol after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
175-
numiter, t, f, normgrad)
174+
@warn @sprintf("CG: not converged to requested tol after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
175+
numiter, format_time(t), f, normgrad)
176176
end
177177
history = [fhistory normgradhistory]
178178
return x, f, g, numfg, history

src/gd.jl

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ function optimize(fg, x, alg::GradientDescent;
7676

7777
numiter = 0
7878
verbosity >= 2 &&
79-
@info @sprintf("GD: initializing with f = %.12f, ‖∇f‖ = %.4e", f, normgrad)
79+
@info @sprintf("GD: initializing with f = %.12e, ‖∇f‖ = %.4e", f, normgrad)
8080
while !(_hasconverged || _shouldstop)
8181
told = t
8282
# compute new search direction
@@ -107,20 +107,20 @@ function optimize(fg, x, alg::GradientDescent;
107107
break
108108
end
109109
verbosity >= 3 &&
110-
@info @sprintf("GD: iter %4d, Δt %7.2f s: f = %.12f, ‖∇f‖ = %.4e, α = %.2e, nfg = %d",
111-
numiter, Δt, f, normgrad, α, nfg)
110+
@info @sprintf("GD: iter %4d, Δt %s: f = %.12e, ‖∇f‖ = %.4e, α = %.2e, nfg = %d",
111+
numiter, format_time(Δt), f, normgrad, α, nfg)
112112

113113
# increase α for next step
114114
α = 2 * α
115115
end
116116
if _hasconverged
117117
verbosity >= 2 &&
118-
@info @sprintf("GD: converged after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
119-
numiter, t, f, normgrad)
118+
@info @sprintf("GD: converged after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
119+
numiter, format_time(t), f, normgrad)
120120
else
121121
verbosity >= 1 &&
122-
@warn @sprintf("GD: not converged to requested tol after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
123-
numiter, t, f, normgrad)
122+
@warn @sprintf("GD: not converged to requested tol after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
123+
numiter, format_time(t), f, normgrad)
124124
end
125125
history = [fhistory normgradhistory]
126126
return x, f, g, numfg, history

src/lbfgs.jl

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -80,7 +80,7 @@ function optimize(fg, x, alg::LBFGS;
8080
H = LBFGSInverseHessian(m, TangentType[], TangentType[], ScalarType[])
8181

8282
verbosity >= 2 &&
83-
@info @sprintf("LBFGS: initializing with f = %.12f, ‖∇f‖ = %.4e", f, normgrad)
83+
@info @sprintf("LBFGS: initializing with f = %.12e, ‖∇f‖ = %.4e", f, normgrad)
8484

8585
while !(_hasconverged || _shouldstop)
8686
told = t
@@ -127,8 +127,8 @@ function optimize(fg, x, alg::LBFGS;
127127
break
128128
end
129129
verbosity >= 3 &&
130-
@info @sprintf("LBFGS: iter %4d, Δt %7.2f: f = %.12f, ‖∇f‖ = %.4e, α = %.2e, m = %d, nfg = %d",
131-
numiter, Δt, f, normgrad, α, length(H), nfg)
130+
@info @sprintf("LBFGS: iter %4d, Δt %s: f = %.12e, ‖∇f‖ = %.4e, α = %.2e, m = %d, nfg = %d",
131+
numiter, format_time(Δt), f, normgrad, α, length(H), nfg)
132132

133133
# transport gprev, ηprev and vectors in Hessian approximation to x
134134
gprev = transport!(gprev, xprev, ηprev, α, x)
@@ -192,12 +192,12 @@ function optimize(fg, x, alg::LBFGS;
192192
end
193193
if _hasconverged
194194
verbosity >= 2 &&
195-
@info @sprintf("LBFGS: converged after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
196-
numiter, t, f, normgrad)
195+
@info @sprintf("LBFGS: converged after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
196+
numiter, format_time(t), f, normgrad)
197197
else
198198
verbosity >= 1 &&
199-
@warn @sprintf("LBFGS: not converged to requested tol after %d iterations and time %.2f s: f = %.12f, ‖∇f‖ = %.4e",
200-
numiter, t, f, normgrad)
199+
@warn @sprintf("LBFGS: not converged to requested tol after %d iterations and time %s: f = %.12e, ‖∇f‖ = %.4e",
200+
numiter, format_time(t), f, normgrad)
201201
end
202202
history = [fhistory normgradhistory]
203203
return x, f, g, numfg, history

0 commit comments

Comments
 (0)