Newton-Raphson implementation

sheaf 2023-01-09 23:59:00 +01:00
parent 671dae5474
commit 43098dec01


@@ -1,4 +1,7 @@
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UnboxedTuples #-}
{-# OPTIONS_GHC -Wno-name-shadowing #-}
module Math.Roots where
@@ -274,3 +277,120 @@ derivative p = do
go ( i + 1 )
go 0
pure p'
--------------------------------------------------------------------------------
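-- | Parameters for 'newtonRaphson': the function to solve, returning its value
-- together with its derivative as an unboxed pair, an iteration limit, the
-- bracketing interval @[min_x, max_x]@, and the number of binary digits of
-- accuracy requested.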
data NewtonRaphson
= NewtonRaphson
{ f :: Double -> (# Double, Double #)
, maxIters :: Word
, min_x, max_x :: Double
, digits :: Int }
-- | One-dimensional Newton-Raphson root finding over a bracketing interval,
-- with bisection-style fallback steps; returns 'Nothing' if the iteration
-- limit is exceeded or the root is no longer bracketed.
--
-- Implementation adapted from the Boost C++ math library:
-- https://github.com/boostorg/math/blob/0dc6a70caa6bbec2b6ae25eede36c430f0ccae13/include/boost/math/tools/roots.hpp#L217
newtonRaphson :: NewtonRaphson
-> Double
-> Maybe Double
newtonRaphson ( NewtonRaphson { f, maxIters, min_x, max_x, digits } ) x0 =
go $
NewtonRaphsonState
{ f_x_prev = 0
, x = x0
, δ = maxDouble, δ1 = maxDouble
, iters = 0
, min_x, max_x
, min_f_x = 0, max_f_x = 0 }
where
factor = encodeFloat 1 ( 1 - digits ) -- relative tolerance 2^(1 - digits), i.e. Boost's ldexp(1.0, 1 - digits)
go ( NewtonRaphsonState { f_x_prev, x, δ = δ1, δ1 = δ2, iters, min_x, max_x, min_f_x, max_f_x } )
= case f x of
(# f_x, f'_x #)
| f_x == 0
-> Just x
| δ <-
if | f'_x == 0
-> handleZeroDerivative f_x_prev x f_x δ1 min_x max_x
| otherwise
-> f_x / f'_x
, (# δ, δ1 #) <-
if | abs ( δ * 2 ) > abs δ2 -- the last two steps haven't converged (Boost: fabs(delta * 2) > fabs(delta2))
, let shift = if δ > 0 then 0.5 * ( x - min_x ) else 0.5 * ( x - max_x )
, δ <- if x /= 0 && ( abs shift > abs x )
then signum δ * abs x * 1.1
else shift
-> (# δ, 3 * δ #)
| otherwise
-> (# δ, δ1 #)
, let new_x = x - δ
, (# δ, new_x #) <-
if | new_x <= min_x
, δ <- 0.5 * ( x - min_x )
-> (# δ, x - δ #)
| new_x >= max_x
, δ <- 0.5 * ( x - max_x )
-> (# δ, x - δ #)
| otherwise
-> (# δ, new_x #)
-> if
| abs δ <= abs ( new_x * factor )
|| new_x == min_x || new_x == max_x
-> Just x
| iters >= maxIters
-> Nothing
| (# min_x, max_x, min_f_x, max_f_x #) <-
if δ > 0
then (# min_x, x, min_f_x, f_x #)
else (# x, max_x, f_x, max_f_x #)
-> if min_f_x * max_f_x > 0
then Nothing
else
go $ NewtonRaphsonState
{ f_x_prev = f_x, x = new_x
, δ, δ1
, iters = iters + 1
, min_x, max_x
, min_f_x, max_f_x
}
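-- Fallback step used when the derivative vanishes, mirroring Boost's
-- handle_zero_derivative: if this is the first iteration, pretend there was a
-- previous point at one end of the bracket, then take a bisection-style half
-- step towards the appropriate bracket endpoint. 'f' here is the function of
-- the enclosing 'newtonRaphson' call.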
handleZeroDerivative :: Double
-> Double -> Double
-> Double
-> Double -> Double
-> Double
handleZeroDerivative f_x_prev x f_x δ min_x max_x
-- Handle zero derivative on first iteration.
| f_x_prev == 0
, x_prev <- if x <= 0.5 * ( min_x + max_x ) then max_x else min_x
, (# f_x_prev, _ #) <- f x_prev
, δ <- x_prev - x
= finish f_x_prev δ
| otherwise
= finish f_x_prev δ
where
finish f_x_prev δ
| signum f_x_prev * signum f_x < 0
= if δ < 0 then 0.5 * ( x - min_x ) else 0.5 * ( x - max_x )
| otherwise
= if δ < 0 then 0.5 * ( x - max_x ) else 0.5 * ( x - min_x )
-- | Loop state for the 'newtonRaphson' function.
data NewtonRaphsonState =
NewtonRaphsonState
{ f_x_prev :: !Double
, x :: !Double
, δ, δ1 :: !Double
, iters :: !Word
, min_x, max_x :: !Double
, min_f_x, max_f_x :: !Double }
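-- | The largest finite 'Double' value.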
maxDouble :: Double
maxDouble = encodeFloat m n
where
b = floatRadix ( 0 :: Double )
e = floatDigits ( 0 :: Double )
(_, e') = floatRange ( 0 :: Double )
m = b ^ e - 1
n = e' - e
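--------------------------------------------------------------------------------
-- A minimal usage sketch (illustrative only, not part of the module's API):
-- approximate sqrt 2 as the root of f(x) = x^2 - 2, with derivative f'(x) = 2x,
-- on the bracket [1, 2], asking for full Double precision. The name
-- 'sqrt2Example' is hypothetical.
sqrt2Example :: Maybe Double
sqrt2Example =
  newtonRaphson
    ( NewtonRaphson
        { f        = \ x -> (# x * x - 2, 2 * x #)
        , maxIters = 50
        , min_x    = 1
        , max_x    = 2
        , digits   = floatDigits ( 0 :: Double )
        } )
    1.5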