{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
module Numeric.ADDual.Examples where

import Control.DeepSeq
import Control.Monad (replicateM)
import Data.Bifunctor (bimap)
import Data.Maybe (catMaybes)
import qualified Data.Vector as V
import qualified Data.Vector.Storable as VS
import GHC.Generics (Generic)
import Hedgehog (Gen, Size)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
import qualified Hedgehog.Internal.Gen as HI.Gen
import qualified Hedgehog.Internal.Seed as HI.Seed
import qualified Hedgehog.Internal.Tree as HI.Tree

import Numeric.ADDual.VectorOps


-- | A dense matrix, stored row-major in a flat boxed vector.
type Matrix s = V.Vector s

-- | A small fully-connected network: a list of (weights, bias) layers plus
-- the input vector.
data FNeural a = FNeural [(Matrix a, V.Vector a)] (V.Vector a)
  deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance NFData a => NFData (FNeural a)

type SMatrix s = VS.Vector s

-- | The same network parametrised over the vector type @v@, so that the
-- vectorised operations from "Numeric.ADDual.VectorOps" apply. A matrix is
-- stored as a boxed vector of rows.
data FNeuralA v = FNeuralA [(V.Vector v, v)] v
  deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance NFData v => NFData (FNeuralA v)

-- | Run the network: affine layers with ReLU activations, a numerically
-- stable softmax at the end, and a final sum to produce a scalar.
fneural :: (Floating a, Ord a) => FNeural a -> a
fneural (FNeural layers input) =
  let dotp v1 v2 = V.sum (V.zipWith (*) v1 v2)

      -- Matrix-vector product on the flat row-major representation: the
      -- number of rows follows from the matrix size and the input length.
      mat @. vec =
        let n = V.length vec
            m = V.length mat `div` n
        in V.fromListN m $ map (\i -> dotp (V.slice (n*i) n mat) vec) [0 .. m-1]

      (+.) = V.zipWith (+)

      relu x = if x >= 0.0 then x else 0.0

      -- Subtract the maximum before exponentiating to avoid overflow.
      safeSoftmax vec =
        let m = V.maximum vec
            factor = V.sum (V.map (\z -> exp (z - m)) vec)
        in V.map (\z -> exp (z - m) / factor) vec

      forward [] x = safeSoftmax x
      forward ((weights, bias) : lys) x =
        let x' = V.map relu ((weights @. x) +. bias)
        in forward lys x'
  in V.sum $ forward layers input

makeNeuralInput :: Int -> FNeural Double
makeNeuralInput scale = cvtFNeuralAtoFNeural $ makeNeuralInput_A scale

genNeuralInput :: Int -> Gen (FNeural Double)
genNeuralInput scale = cvtFNeuralAtoFNeural <$> genNeuralInput_A scale

-- | Flatten the row-based representation into the flat row-major one used
-- by 'FNeural'.
cvtFNeuralAtoFNeural :: FNeuralA (VS.Vector Double) -> FNeural Double
cvtFNeuralAtoFNeural (FNeuralA layers input) =
  FNeural
    (map (bimap (\m -> let nrows = V.length m
                           ncols = VS.length (m V.! 0)
                       in V.fromListN (nrows*ncols) $ concatMap VS.toList $ V.toList m)
                (\v -> let n = VS.length v
                       in V.fromListN n (VS.toList v)))
         layers)
    (let n = VS.length input
     in V.fromListN n (VS.toList input))

-- | 'fneural' expressed in the vectorised operations from
-- "Numeric.ADDual.VectorOps".
fneural_A :: forall v. (VectorOpsFloating v, VectorOpsOrd v)
          => FNeuralA v -> VectorOpsScalar v
fneural_A (FNeuralA layers input) =
  let dotp v1 v2 = vsum (vmul v1 v2)

      -- Matrix-vector product. The matrix is a boxed vector of rows here,
      -- so the output has one entry per row.
      (@.) :: V.Vector v -> v -> v
      mat @. vec = vfromListN (V.length mat) $
                     map (\row -> dotp row vec) (V.toList mat)

      (+.) = vadd

      -- Vectorised ReLU: keep entries that are >= 0, zero out the rest.
      batchrelu :: v -> v
      batchrelu x = vselect (vcmpGE x (vreplicate (vlength x) 0.0))
                            x (vreplicate (vlength x) 0.0)

      safeSoftmax vec =
        let m = vmaximum vec
            exps = vexp (vsub vec (vreplicate (vlength vec) m))
            factor = vsum exps
        in vmul exps (vreplicate (vlength vec) (recip factor))

      forward [] x = safeSoftmax x
      forward ((weights, bias) : lys) x =
        let x' = batchrelu ((weights @. x) +. bias)
        in forward lys x'
  in vsum $ forward layers input
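
-- A minimal usage sketch; the name '_exampleFNeural' is hypothetical and not
-- part of the library API. The weights form a 2x3 matrix flattened
-- row-major, so 'fneural' multiplies it with the length-3 input and adds the
-- length-2 bias. Because the network ends in a softmax followed by a sum,
-- the result is 1.0 up to rounding.
_exampleFNeural :: Double
_exampleFNeural =
  let weights = V.fromListN 6 [1, 0, 0, 0, 1, 0]  -- rows [1,0,0] and [0,1,0]
      bias    = V.fromList [0.1, -0.2]
      input   = V.fromList [1, 2, 3]
  in fneural (FNeural [(weights, bias)] input)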
makeNeuralInput_A :: Int -> FNeuralA (VS.Vector Double)
makeNeuralInput_A scale = sampleGenPure 100 (genNeuralInput_A scale)

-- | Generate a random network with two hidden layers whose sizes are at
-- most @scale@.
genNeuralInput_A :: Int -> Gen (FNeuralA (VS.Vector Double))
genNeuralInput_A scale = do
  let genScalar = Gen.double (Range.linearFracFrom 0 (-1) 1)
      genVector nout = VS.fromListN nout <$> replicateM nout genScalar
      -- A matrix mapping R^nin to R^nout: 'nout' rows of length 'nin', the
      -- layout expected by 'fneural_A' and 'cvtFNeuralAtoFNeural'.
      genMatrix nin nout = V.fromListN nout <$> replicateM nout (genVector nin)
  nIn <- Gen.integral (Range.linear 1 scale)
  n1 <- Gen.integral (Range.linear 1 scale)
  n2 <- Gen.integral (Range.linear 1 scale)
  m1 <- genMatrix nIn n1; v1 <- genVector n1
  m2 <- genMatrix n1 n2; v2 <- genVector n2
  inp <- genVector nIn
  pure $ FNeuralA [(m1, v1), (m2, v2)] inp

-- | Deterministically sample a generator: try seeds 42, 43, ... and take
-- the first that produces a value. Uses hedgehog internals, so it is tied
-- to the hedgehog version.
sampleGenPure :: Size -> Gen a -> a
sampleGenPure size gen =
  HI.Tree.treeValue $ head $
    catMaybes [HI.Gen.evalGen size (HI.Seed.from n) gen | n <- [42..]]
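
-- A consistency sketch; '_checkNeuralConsistent' is a hypothetical name,
-- and this assumes "Numeric.ADDual.VectorOps" provides instances for
-- 'VS.Vector Double' with 'VectorOpsScalar (VS.Vector Double) ~ Double',
-- which the types in this module suggest. Since 'makeNeuralInput' is the
-- flattened conversion of 'makeNeuralInput_A', the two evaluations should
-- agree up to floating-point rounding.
_checkNeuralConsistent :: Int -> (Double, Double)
_checkNeuralConsistent scale =
  (fneural (makeNeuralInput scale), fneural_A (makeNeuralInput_A scale))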