Skip to content

Commit

Permalink
Merge pull request #31 from ringabout/master
Browse files Browse the repository at this point in the history
add patches for stricteffects
  • Loading branch information
HugoGranstrom committed Oct 7, 2022
2 parents 278353f + 8076350 commit a3e8cf0
Showing 1 changed file with 8 additions and 1 deletion.
9 changes: 8 additions & 1 deletion src/numericalnim/optimize.nim
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@ import std/[strformat, sequtils, math, deques]
import arraymancer
import ./differentiate

when not defined(nimHasEffectsOf):
  # Compatibility shim: older Nim compilers (before strict effects /
  # `nimHasEffectsOf`) do not know the `effectsOf` pragma. Declaring it as
  # an empty user pragma makes annotations such as
  # `{.effectsOf: [f, deriv].}` below compile as no-ops on those versions.
  {.pragma: effectsOf.}

proc steepest_descent*(deriv: proc(x: float64): float64, start: float64, gamma: float64 = 0.01, precision: float64 = 1e-5, max_iters: Natural = 1000):float64 {.inline.} =
## Gradient descent optimization algorithm for finding local minimums of a function with derivative 'deriv'
##
Expand Down Expand Up @@ -75,7 +78,11 @@ proc conjugate_gradient*[T](A, b, x_0: Tensor[T], tolerance: float64): Tensor[T]
rsold = rsnew


proc newtons*(f: proc(x: float64): float64, deriv: proc(x: float64): float64, start: float64, precision: float64 = 1e-5, max_iters: Natural = 1000): float64 {.raises: [ArithmeticError].} =
proc newtons*(f: proc(x: float64): float64,
deriv: proc(x: float64): float64,
start: float64, precision: float64 = 1e-5,
max_iters: Natural = 1000
): float64{.raises: [ArithmeticError], effectsOf: [f, deriv].} =
## Newton-Raphson implementation for 1-dimensional functions

## Given a single-variable function f and its derivative, calculate an approximation to f(x) = 0
Expand Down

0 comments on commit a3e8cf0

Please sign in to comment.