author     Tom Smeding <t.j.smeding@uu.nl>  2025-02-21 13:35:26 +0100
committer  Tom Smeding <t.j.smeding@uu.nl>  2025-02-21 13:35:26 +0100
commit     a17bd53598ee5266fc3a1c45f8f4bb4798dc495e (patch)
tree       ee7962f603fbb26a0df0f793b8e50666f41a0dfd /examples/Numeric/ADDual/Examples.hs
parent     b91d36fa38be07397b505433f24a6d29a79c2642 (diff)
Working tests and benchmarks against 'ad'
Diffstat (limited to 'examples/Numeric/ADDual/Examples.hs')
-rw-r--r--  examples/Numeric/ADDual/Examples.hs  37
1 file changed, 19 insertions(+), 18 deletions(-)
diff --git a/examples/Numeric/ADDual/Examples.hs b/examples/Numeric/ADDual/Examples.hs
index d6aa6d2..819aec4 100644
--- a/examples/Numeric/ADDual/Examples.hs
+++ b/examples/Numeric/ADDual/Examples.hs
@@ -5,17 +5,21 @@ module Numeric.ADDual.Examples where
import Control.DeepSeq
import Control.Monad (replicateM)
+import Data.Maybe (catMaybes)
import qualified Data.Vector as V
import GHC.Generics (Generic)
-import Hedgehog (Gen)
+import Hedgehog (Gen, Size)
import qualified Hedgehog.Gen as Gen
import qualified Hedgehog.Range as Range
+import qualified Hedgehog.Internal.Gen as HI.Gen
+import qualified Hedgehog.Internal.Seed as HI.Seed
+import qualified Hedgehog.Internal.Tree as HI.Tree
type Matrix s = V.Vector s
data FNeural a = FNeural [(Matrix a, V.Vector a)] (V.Vector a)
- deriving (Show, Functor, Foldable, Traversable, Generic)
+ deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance NFData a => NFData (FNeural a)
@@ -39,27 +43,24 @@ fneural (FNeural layers input) =
in forward lys x'
in V.sum $ forward layers input
-makeNeuralInput :: FNeural Double
-makeNeuralInput =
- let genMatrix nin nout =
- V.fromListN (nin*nout) [sin (fromIntegral @Int (i+j))
- | i <- [0..nout-1], j <- [0..nin-1]]
- genVector nout = V.fromListN nout [sin (0.41 * fromIntegral @Int i) | i <- [0..nout-1]]
- -- 50 inputs; 2 hidden layers (100; 50); final softmax, then sum the outputs.
- nIn = 50; n1 = 100; n2 = 50
- in FNeural [(genMatrix nIn n1, genVector n1)
- ,(genMatrix n1 n2, genVector n2)]
- (genVector nIn)
+makeNeuralInput :: Int -> FNeural Double
+makeNeuralInput scale = sampleGenPure 100 (genNeuralInput scale)
-genNeuralInput :: Gen (FNeural Double)
-genNeuralInput = do
+genNeuralInput :: Int -> Gen (FNeural Double)
+genNeuralInput scale = do
let genScalar = Gen.double (Range.linearFracFrom 0 (-1) 1)
genMatrix nin nout = V.fromListN (nin*nout) <$> replicateM (nin*nout) genScalar
genVector nout = V.fromListN nout <$> replicateM nout genScalar
- nIn <- Gen.integral (Range.linear 1 80)
- n1 <- Gen.integral (Range.linear 1 100)
- n2 <- Gen.integral (Range.linear 1 80)
+ nIn <- Gen.integral (Range.linear 1 scale)
+ n1 <- Gen.integral (Range.linear 1 scale)
+ n2 <- Gen.integral (Range.linear 1 scale)
m1 <- genMatrix nIn n1; v1 <- genVector n1
m2 <- genMatrix n1 n2; v2 <- genVector n2
inp <- genVector nIn
pure $ FNeural [(m1, v1), (m2, v2)] inp
+
+
+sampleGenPure :: Size -> Gen a -> a
+sampleGenPure size gen =
+ HI.Tree.treeValue $ head $ catMaybes
+ [HI.Gen.evalGen size (HI.Seed.from n) gen | n <- [42..]]
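
Usage sketch (not part of the diff): after this commit, makeNeuralInput takes a scale that bounds the layer sizes drawn by genNeuralInput, and sampleGenPure evaluates the generator purely by trying seeds from 42 upward and taking the first seed that evalGen accepts, so the benchmark input is deterministic without IO. The driver below is an assumption for illustration; fneural's constraint is guessed as Floating from the hunk header above, and only makeNeuralInput's signature is taken directly from the diff.

-- Hypothetical driver module, assuming fneural :: Floating a => FNeural a -> a.
import Numeric.ADDual.Examples (makeNeuralInput, fneural)

main :: IO ()
main = do
  -- Deterministic input: layer widths are drawn from Range.linear 1 50,
  -- sampled via sampleGenPure at Size 100 with seeds starting at 42.
  let input = makeNeuralInput 50
  -- Evaluate the network once and print the scalar output.
  print (fneural input)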