diff options
Diffstat (limited to 'bench/Main.hs')
-rw-r--r-- | bench/Main.hs | 45 |
1 files changed, 1 insertions, 44 deletions
diff --git a/bench/Main.hs b/bench/Main.hs
index cb5e829..a11f4e8 100644
--- a/bench/Main.hs
+++ b/bench/Main.hs
@@ -1,56 +1,13 @@
-{-# LANGUAGE DeriveTraversable #-}
 {-# LANGUAGE TypeApplications #-}
-{-# LANGUAGE DeriveGeneric #-}
 module Main where
 
-import Control.DeepSeq
 import Criterion
 import Criterion.Main
-import qualified Data.Vector as V
-import GHC.Generics (Generic)
 
 import qualified Numeric.ADDual as ADD
+import Numeric.ADDual.Examples
 
-type Matrix s = V.Vector s
-
-data FNeural a = FNeural [(Matrix a, V.Vector a)] (V.Vector a)
-  deriving (Show, Functor, Foldable, Traversable, Generic)
-
-instance NFData a => NFData (FNeural a)
-
-fneural :: (Floating a, Ord a) => FNeural a -> a
-fneural (FNeural layers input) =
-  let dotp v1 v2 = V.sum (V.zipWith (*) v1 v2)
-
-      mat @. vec =
-        let n = V.length vec
-            m = V.length mat `div` n
-        in V.fromListN m $ map (\i -> dotp (V.slice (n*i) n mat) vec) [0 .. m-1]
-      (+.) = V.zipWith (+)
-
-      relu x = if x >= 0.0 then x else 0.0
-      safeSoftmax vec = let m = V.maximum vec
-                            factor = V.sum (V.map (\z -> exp (z - m)) vec)
-                        in V.map (\z -> exp (z - m) / factor) vec
-      forward [] x = safeSoftmax x
-      forward ((weights, bias) : lys) x =
-        let x' = V.map relu ((weights @. x) +. bias)
-        in forward lys x'
-  in V.sum $ forward layers input
-
-makeNeuralInput :: FNeural Double
-makeNeuralInput =
-  let genMatrix nin nout =
-        V.fromListN (nin*nout) [sin (fromIntegral @Int (i+j))
-                               | i <- [0..nout-1], j <- [0..nin-1]]
-      genVector nout = V.fromListN nout [sin (0.41 * fromIntegral @Int i) | i <- [0..nout-1]]
-      -- 50 inputs; 2 hidden layers (100; 50); final softmax, then sum the outputs.
-      nIn = 50; n1 = 100; n2 = 50
-  in FNeural [(genMatrix nIn n1, genVector n1)
-             ,(genMatrix n1 n2, genVector n2)]
-             (genVector nIn)
-
 main :: IO ()
 main = defaultMain
   [env (pure makeNeuralInput) $ \input ->