aboutsummaryrefslogtreecommitdiff
path: root/examples/Numeric/ADDual/Examples.hs
blob: 3835daa29a9b802e30c4614d604858266618c9e7 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Numeric.ADDual.Examples where

import Control.DeepSeq
import Control.Monad (replicateM)
import Data.Bifunctor (bimap)
import Data.Maybe (catMaybes)
import qualified Data.Vector as V
import qualified Data.Vector.Storable as VS
import GHC.Generics (Generic)
import Hedgehog (Gen, Size)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Hedgehog.Internal.Gen as HI.Gen
import qualified Hedgehog.Internal.Seed as HI.Seed
import qualified Hedgehog.Internal.Tree as HI.Tree

import Numeric.ADDual.VectorOps


-- | A dense matrix stored flattened in one boxed vector. 'fneural'
-- slices it row-major into rows whose length equals the vector it is
-- multiplied with.
type Matrix s = V.Vector s

-- | Input for 'fneural': a list of (weight matrix, bias vector) layers
-- together with the network's input vector.
data FNeural a = FNeural [(Matrix a, V.Vector a)] (V.Vector a)
  deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance NFData a => NFData (FNeural a)

-- | A vector of storable scalars used as a matrix row type.
-- NOTE(review): not referenced anywhere in this file's visible code —
-- possibly used by other modules; confirm before removing.
type SMatrix s = VS.Vector s

-- | Input for 'fneural_A', abstracted over the vector type @v@: each
-- layer pairs a boxed vector of rows (each row a @v@, dotted against the
-- activations in 'fneural_A') with a bias @v@; the final field is the
-- network input.
data FNeuralA v = FNeuralA [(V.Vector v, v)] v
  deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance NFData v => NFData (FNeuralA v)

-- | Run a small fully-connected network (ReLU hidden layers, softmax on
-- the final activations) and sum the resulting distribution to a scalar.
fneural :: (Floating a, Ord a) => FNeural a -> a
fneural (FNeural layers input) = V.sum (run layers input)
  where
    -- Feed the current activations through the remaining layers.
    run [] acts = stableSoftmax acts
    run ((w, b) : rest) acts =
      run rest (V.map rectify (addV (matVec w acts) b))

    -- Row-major matrix/vector product: the flat matrix is sliced into
    -- rows whose length equals the input vector's length.
    matVec m v =
      let cols = V.length v
          rows = V.length m `div` cols
      in V.generate rows (\r -> dot (V.slice (cols * r) cols m) v)

    dot a b = V.sum (V.zipWith (*) a b)
    addV = V.zipWith (+)

    rectify z = if z >= 0.0 then z else 0.0

    -- Softmax with the usual max-subtraction trick for numerical
    -- stability.
    stableSoftmax v =
      let hi = V.maximum v
          denom = V.sum (V.map (\e -> exp (e - hi)) v)
      in V.map (\e -> exp (e - hi) / denom) v

-- | Deterministically build an 'FNeural' test input of the given scale
-- by sampling 'genNeuralInput_A' and converting to the flat form.
makeNeuralInput :: Int -> FNeural Double
makeNeuralInput scale = cvtFNeuralAtoFNeural (makeNeuralInput_A scale)

-- | Hedgehog generator for an 'FNeural' input of the given scale,
-- derived from the abstract generator 'genNeuralInput_A'.
genNeuralInput :: Int -> Gen (FNeural Double)
genNeuralInput scale = fmap cvtFNeuralAtoFNeural (genNeuralInput_A scale)

-- | Convert the row-per-element representation ('FNeuralA') to the flat
-- boxed representation ('FNeural'): each weight matrix is flattened
-- row-major into a single boxed vector, and storable vectors are boxed.
--
-- Fix: the original read @m V.! 0@ to get the row width, which crashes
-- on an empty weight matrix even though the flattened result is simply
-- empty; the row width is now only inspected when a row exists.
cvtFNeuralAtoFNeural :: FNeuralA (VS.Vector Double) -> FNeural Double
cvtFNeuralAtoFNeural (FNeuralA layers input) =
  FNeural (map (bimap flattenMatrix boxVector) layers) (boxVector input)
  where
    -- Concatenate the rows of a boxed-vector-of-rows into one flat
    -- boxed vector (row-major).
    flattenMatrix m =
      let nrows = V.length m
          ncols = if nrows == 0 then 0 else VS.length (V.head m)
      in V.fromListN (nrows * ncols) (concatMap VS.toList (V.toList m))
    -- Copy a storable vector into a boxed one.
    boxVector v = V.fromListN (VS.length v) (VS.toList v)

-- | Variant of 'fneural' written against the 'VectorOpsFloating' /
-- 'VectorOpsOrd' abstraction, so the same network can run over any
-- supported vector type @v@.
fneural_A :: forall v. (VectorOpsFloating v, VectorOpsOrd v)
          => FNeuralA v -> VectorOpsScalar v
fneural_A (FNeuralA layers input) = vsum (run layers input)
  where
    -- Feed the current activations through the remaining layers.
    run :: [(V.Vector v, v)] -> v -> v
    run [] acts = stableSoftmax acts
    run ((w, b) : rest) acts = run rest (rectify (vadd (matVec w acts) b))

    -- One dot product per stored row against the input vector.
    -- NOTE(review): the result length is @V.length mat `div` vlength vec@,
    -- which mirrors the flat layout in 'fneural'; since @mat@ here is
    -- already a vector of rows this looks like it may undercount the
    -- rows — TODO confirm against the VectorOps/vfromListN semantics.
    matVec :: V.Vector v -> v -> v
    matVec mat vec =
      let n = vlength vec
          m = V.length mat `div` n
      in vfromListN m (map (\row -> dot row vec) (V.toList mat))

    dot :: v -> v -> VectorOpsScalar v
    dot a b = vsum (vmul a b)

    -- Element-wise ReLU via a vectorised compare-and-select against 0.
    rectify :: v -> v
    rectify x =
      let zeros = vreplicate (vlength x) 0.0
      in vselect (vcmpGE x zeros) x zeros

    -- Softmax with max subtraction for numerical stability.
    stableSoftmax :: v -> v
    stableSoftmax vec =
      let m = vmaximum vec
          exps = vexp (vsub vec (vreplicate (vlength vec) m))
          total = vsum exps
      in vmul exps (vreplicate (vlength vec) (recip total))

-- | Deterministically build an abstract network input of the given
-- scale by pure-sampling 'genNeuralInput_A' at Hedgehog size 100.
makeNeuralInput_A :: Int -> FNeuralA (VS.Vector Double)
makeNeuralInput_A scale = sampleGenPure 100 $ genNeuralInput_A scale

-- | Hedgehog generator for a two-hidden-layer network: layer sizes are
-- drawn from @[1 .. scale]@, and all weights, biases and inputs are
-- doubles in [-1, 1].
genNeuralInput_A :: Int -> Gen (FNeuralA (VS.Vector Double))
genNeuralInput_A scale = do
  -- Draw order matters for reproducibility; it matches the field order
  -- of the result.
  sizeIn <- layerSize
  size1 <- layerSize
  size2 <- layerSize
  w1 <- matrixGen sizeIn size1
  b1 <- vectorGen size1
  w2 <- matrixGen size1 size2
  b2 <- vectorGen size2
  x0 <- vectorGen sizeIn
  pure (FNeuralA [(w1, b1), (w2, b2)] x0)
  where
    layerSize = Gen.integral (Range.linear 1 scale)
    scalarGen = Gen.double (Range.linearFracFrom 0 (-1) 1)
    vectorGen k = VS.fromListN k <$> replicateM k scalarGen
    matrixGen rows cols = V.fromListN rows <$> replicateM rows (vectorGen cols)


-- | Deterministically sample a generator: walk seeds 42, 43, ... and
-- return the value from the first seed for which the generator yields a
-- result (evalGen can discard; non-termination only if it always does).
sampleGenPure :: Size -> Gen a -> a
sampleGenPure size gen = go 42
  where
    go seed =
      case HI.Gen.evalGen size (HI.Seed.from seed) gen of
        Just tree -> HI.Tree.treeValue tree
        Nothing   -> go (seed + 1)