Commit 22fd1bb

basic newton solver
1 parent 34dd316 commit 22fd1bb

1 file changed

src/numericalnim/optimize.nim (39 additions, 15 deletions)

@@ -151,7 +151,28 @@ proc steepestDescent*[U, T](f: proc(x: Tensor[U]): T, x0: Tensor[U], alpha: U =
         gradNorm = vectorNorm(gradient)
         iters += 1
     if iters >= 10000:
-        echo "Limit of 10000 iterations reached!"
+        discard "Limit of 10000 iterations reached!"
+    #echo iters, " iterations done!"
+    result = x
+
+proc newton*[U, T](f: proc(x: Tensor[U]): T, x0: Tensor[U], alpha: U = U(0.1), tol: U = U(1e-6), fastMode: bool = false): Tensor[U] =
+    var x = x0.clone()
+    var fNorm = abs(f(x))
+    var gradient = tensorGradient(f, x, fastMode=fastMode)
+    var gradNorm = vectorNorm(gradient)
+    var hessian = tensorHessian(f, x)
+    var iters: int
+    while gradNorm > tol*(1 + fNorm) and iters < 10000:
+        let p = -solve(hessian, gradient)
+        x += alpha * p
+        let fx = f(x)
+        fNorm = abs(fx)
+        gradient = tensorGradient(f, x, fastMode=fastMode)
+        gradNorm = vectorNorm(gradient)
+        hessian = tensorHessian(f, x)
+        iters += 1
+    if iters >= 10000:
+        discard "Limit of 10000 iterations reached!"
     #echo iters, " iterations done!"
     result = x
 
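Note on the hunk above: the loop in the new newton proc is a damped Newton iteration. Assuming solve(hessian, gradient) solves the linear system H(x) y = ∇f(x), each pass computes the Newton direction and takes a scaled step,

    H(x_k)\, p_k = -\nabla f(x_k), \qquad x_{k+1} = x_k + \alpha\, p_k,

which corresponds to "let p = -solve(hessian, gradient)" followed by "x += alpha * p", with alpha defaulting to 0.1. The loop exits once gradNorm <= tol*(1 + fNorm), a test that behaves like an absolute tolerance when |f(x)| is small and a relative one when it is large, or after 10000 iterations.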

@@ -201,25 +222,28 @@ proc levmarq*[U, T](f: proc(params: Tensor[U], x: U): T, params0: Tensor[U], xDa
 when isMainModule:
     import benchy
     # Steepest descent:
-    #[ proc f1(x: Tensor[float]): float =
-        result = x[0]*x[0] + x[1]*x[1] - 10
+    proc f1(x: Tensor[float]): float =
+        # result = -20*exp(-0.2*sqrt(0.5 * (x[0]*x[0] + x[1]*x[1]))) - exp(0.5*(cos(2*PI*x[0]) + cos(2*PI*x[1]))) + E + 20 # Ackley
+        result = (1 - x[0])^2 + 100*(x[1] - x[0]^2)^2
 
-    let x0 = [10.0, 10.0].toTensor
-    echo eye[float](10)
-    let sol1 = steepestDescent(f1, x0, tol=1e-10, fastMode=false)
-    let sol2 = steepestDescent(f1, x0, tol=1e-10, fastMode=true)
+    let x0 = [-1.0, -1.0].toTensor
+    let sol1 = steepestDescent(f1, x0, tol=1e-8, alpha=0.001, fastMode=true)
     echo sol1
-    echo sol2
     echo f1(sol1)
-    echo f1(sol2)
+    echo "Newton: ", newton(f1, x0, tol=1e-8, fastMode=false)
+    echo "Newton: ", newton(f1, x0, tol=1e-8, fastMode=true)
 
-    timeIt "slow mode":
-        keep steepestDescent(f1, x0, tol=1e-10, fastMode=false)
-    timeIt "fast mode":
-        keep steepestDescent(f1, x0, tol=1e-10, fastMode=true) ]#
+    timeIt "steepest slow mode":
+        keep steepestDescent(f1, x0, tol=1e-8, alpha=0.001, fastMode=false)
+    timeIt "steepest fast mode":
+        keep steepestDescent(f1, x0, tol=1e-8, alpha=0.001, fastMode=true)
+    timeIt "newton slow mode":
+        keep newton(f1, x0, tol=1e-8, fastMode=false)
+    timeIt "newton fast mode":
+        keep newton(f1, x0, tol=1e-8, fastMode=true)
 
     # Lev-Marq:
-    proc fFit(params: Tensor[float], x: float): float =
+    #[ proc fFit(params: Tensor[float], x: float): float =
         params[0] + params[1] * x + params[2] * x*x
 
     let xData = arraymancer.linspace(0, 10, 100)
@@ -229,7 +253,7 @@ when isMainModule:
     timeIt "slow mode":
         keep levmarq(fFit, params0, xData, yData, fastMode=false)
     timeIt "fast mode":
-        keep levmarq(fFit, params0, xData, yData, fastMode=true)
+        keep levmarq(fFit, params0, xData, yData, fastMode=true) ]#
 
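A minimal usage sketch of the new proc follows. It is not part of the commit: it assumes the module can be imported as numericalnim/optimize (matching the file path above) and that arraymancer is installed; the name rosenbrock is only for illustration and mirrors the f1 test function from the benchmark block, whose minimum is at (1, 1) with value 0.

import math
import arraymancer
import numericalnim/optimize

# Rosenbrock function, the same test function used in the when isMainModule block;
# its global minimum is f(1, 1) = 0.
proc rosenbrock(x: Tensor[float]): float =
    result = (1 - x[0])^2 + 100*(x[1] - x[0]^2)^2

let x0 = [-1.0, -1.0].toTensor
# Damped Newton with the default alpha = 0.1; the result should approach [1.0, 1.0].
echo newton(rosenbrock, x0, tol = 1e-8)
# Steepest descent with a small step size, for comparison.
echo steepestDescent(rosenbrock, x0, tol = 1e-8, alpha = 0.001)

Both calls use fastMode = false (the default); passing fastMode = true selects the variant benchmarked as "fast mode" above.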