@@ -101,6 +101,7 @@ function optimize(fg, x, alg::ConjugateGradient;
         @info @sprintf("CG: initializing with f = %.12f, ‖∇f‖ = %.4e", f, normgrad)
     local xprev, gprev, Pgprev, ηprev
     while !(_hasconverged || _shouldstop)
+        told = t
         # compute new search direction
         if precondition === _precondition
             Pg = g
@@ -140,6 +141,7 @@ function optimize(fg, x, alg::ConjugateGradient;
         push!(fhistory, f)
         push!(normgradhistory, normgrad)
         t = time() - t₀
+        Δt = t - told
         _hasconverged = hasconverged(x, f, g, normgrad)
         _shouldstop = shouldstop(x, f, g, numfg, numiter, t)

@@ -148,8 +150,8 @@ function optimize(fg, x, alg::ConjugateGradient;
             break
         end
         verbosity >= 3 &&
-            @info @sprintf("CG: iter %4d, time %7.2f s: f = %.12f, ‖∇f‖ = %.4e, α = %.2e, β = %.2e, nfg = %d",
-                           numiter, t, f, normgrad, α, β, nfg)
+            @info @sprintf("CG: iter %4d, Δt %7.2f s: f = %.12f, ‖∇f‖ = %.4e, α = %.2e, β = %.2e, nfg = %d",
+                           numiter, Δt, f, normgrad, α, β, nfg)

         # transport gprev, ηprev and vectors in Hessian approximation to x
         gprev = transport!(gprev, xprev, ηprev, α, x)
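For reference, a minimal standalone sketch of the timing pattern this diff introduces (the function name `timing_demo` and the `sleep` stand-in are illustrative, not part of the PR): `t` keeps measuring cumulative wall time since `t₀`, which the stopping criterion still consumes, while the new `Δt = t - told` captures only the iteration just finished, which is what the verbose log line now reports.

# Standalone sketch of the per-iteration timing pattern, not OptimKit code.
# Names t₀, t, told, Δt match the diff; the loop body is a placeholder.
function timing_demo(niter = 3)
    t₀ = time()
    t = time() - t₀          # cumulative wall time, as computed before the loop
    for numiter in 1:niter
        told = t             # cumulative time at the start of this iteration
        sleep(0.1)           # stand-in for the actual CG iteration work
        t = time() - t₀      # cumulative time, still passed to the stopping check
        Δt = t - told        # duration of this iteration, now used for logging
        @info "iter $numiter: Δt = $(round(Δt; digits = 2)) s, total t = $(round(t; digits = 2)) s"
    end
end

timing_demo()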