{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE TypeApplications #-}
module Numeric.ADDual.Examples where

import Control.DeepSeq
import Control.Monad (replicateM)
import qualified Data.Vector as V
import GHC.Generics (Generic)
import Hedgehog (Gen)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range


-- | A matrix stored as a flat vector in row-major order; the row length is
-- taken from the vector it is multiplied with.
type Matrix s = V.Vector s

-- | A small fully-connected network: a list of (weights, bias) layers together
-- with the input vector.
data FNeural a = FNeural [(Matrix a, V.Vector a)] (V.Vector a)
  deriving (Show, Functor, Foldable, Traversable, Generic)

instance NFData a => NFData (FNeural a)

-- | Forward pass: apply each layer followed by ReLU, finish with a numerically
-- stable softmax, and sum the outputs into a single scalar.
fneural :: (Floating a, Ord a) => FNeural a -> a
fneural (FNeural layers input) =
  let dotp v1 v2 = V.sum (V.zipWith (*) v1 v2)

      mat @. vec =
        let n = V.length vec
            m = V.length mat `div` n
        in V.fromListN m $ map (\i -> dotp (V.slice (n*i) n mat) vec) [0 .. m-1]
      (+.) = V.zipWith (+)

      relu x = if x >= 0.0 then x else 0.0
      safeSoftmax vec = let m = V.maximum vec
                            factor = V.sum (V.map (\z -> exp (z - m)) vec)
                        in V.map (\z -> exp (z - m) / factor) vec
      forward [] x = safeSoftmax x
      forward ((weights, bias) : lys) x =
        let x' = V.map relu ((weights @. x) +. bias)
        in forward lys x'
  in V.sum $ forward layers input
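
-- Illustration (not part of the original module): the local (@.) above treats a
-- 'Matrix' as row-major, so a 2x3 matrix [[1,2,3],[4,5,6]] is the flat vector
-- [1,2,3,4,5,6].  'matVec' is a hypothetical standalone copy of that operator,
-- so the layout can be checked in GHCi, e.g.
--   matVec (V.fromList [1,2,3,4,5,6]) (V.fromList [1,1,1]) == V.fromList [6,15]
matVec :: Num a => Matrix a -> V.Vector a -> V.Vector a
matVec mat vec =
  let n = V.length vec
      m = V.length mat `div` n
      dotp v1 v2 = V.sum (V.zipWith (*) v1 v2)
  in V.fromListN m [dotp (V.slice (n*i) n mat) vec | i <- [0 .. m-1]]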

-- | A fixed, deterministic example input, useful for benchmarking.
makeNeuralInput :: FNeural Double
makeNeuralInput =
  let genMatrix nin nout =
        V.fromListN (nin*nout) [sin (fromIntegral @Int (i+j))
                               | i <- [0..nout-1], j <- [0..nin-1]]
      genVector nout = V.fromListN nout [sin (0.41 * fromIntegral @Int i) | i <- [0..nout-1]]
      -- 50 inputs; 2 hidden layers (100; 50); final softmax, then sum the outputs.
      nIn = 50; n1 = 100; n2 = 50
  in FNeural [(genMatrix nIn n1, genVector n1)
             ,(genMatrix n1 n2, genVector n2)]
             (genVector nIn)
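
-- Usage sketch (assumption, not part of the original module): because the
-- network ends in a softmax whose outputs are summed, evaluating the fixed
-- input should give a value very close to 1.0.  Since 'fneural' only needs
-- 'Floating' and 'Ord', the same structure can also be traversed with a
-- dual-number scalar type for automatic differentiation.
evalNeuralExample :: Double
evalNeuralExample = fneural makeNeuralInput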

-- | Hedgehog generator for random network inputs with randomly chosen layer sizes.
genNeuralInput :: Gen (FNeural Double)
genNeuralInput = do
  let genScalar = Gen.double (Range.linearFracFrom 0 (-1) 1)
      genMatrix nin nout = V.fromListN (nin*nout) <$> replicateM (nin*nout) genScalar
      genVector nout = V.fromListN nout <$> replicateM nout genScalar
  nIn <- Gen.integral (Range.linear 1 80)
  n1 <- Gen.integral (Range.linear 1 100)
  n2 <- Gen.integral (Range.linear 1 80)
  m1 <- genMatrix nIn n1; v1 <- genVector n1
  m2 <- genMatrix n1 n2; v2 <- genVector n2
  inp <- genVector nIn
  pure $ FNeural [(m1, v1), (m2, v2)] inp
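
-- Sketch (assumption, not part of the original module): sampling one random
-- network from the generator and evaluating it, e.g. in GHCi.  'Gen.sample'
-- comes from the qualified Hedgehog.Gen import above.  The result should again
-- be ~1.0 up to rounding, since the final softmax normalises the outputs.
sampleNeuralExample :: IO Double
sampleNeuralExample = fneural <$> Gen.sample genNeuralInput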