diff --git a/README.md b/README.md
index 9b7c7fb..c95208a 100644
--- a/README.md
+++ b/README.md
@@ -11,8 +11,9 @@ A minimal usage example would look like this:
 ```haskell
 main :: IO ()
 main = do
-  {- Creates a new network with two inputs, two hidden layers and one output -}
-  network <- newIO [2, 2, 1]
+  {- Creates a new network with two inputs,
+     two hidden neurons and one output -}
+  network <- new [2, 2, 1]
 
   {- Train the network for a common logical AND,
      until the maximum error of 0.01 is reached -}
diff --git a/package.yaml b/package.yaml
index 7857cac..a633e83 100644
--- a/package.yaml
+++ b/package.yaml
@@ -1,6 +1,6 @@
 ---
 name: nn
-version: 0.1.0
+version: 0.2.0
 github: "saschagrunert/nn"
 license: MIT
 author: "Sascha Grunert"
diff --git a/src/AI/Nn.hs b/src/AI/Nn.hs
index c7b9e6d..2af2adb 100644
--- a/src/AI/Nn.hs
+++ b/src/AI/Nn.hs
@@ -6,7 +6,6 @@ module AI.Nn
   ( Network
   , predict
   , new
-  , newIO
   , train
   ) where
@@ -97,6 +96,12 @@ sigmoidLayer :: [Neuron'] -> Layer ()
 sigmoidLayer n = (biasNeuron x, ()) : createLayer n sigmoidNeuron
   where x = length $ head n
 
+-- | Create a new standard network for a number of layers and neurons
+--
+-- @since 0.1.0
+new :: [Int] -> IO Network
+new n = newGen n <$> getStdGen
+
 -- | Create a new output Layer from a list of Neuron'
 --
 -- @since 0.1.0
@@ -106,8 +111,8 @@ outputLayer n = createLayer n outputNeuron
 -- | Create a new network for a StdGen and a number of layer and neurons
 --
 -- @since 0.1.0
-new :: [Int] -> StdGen -> Network
-new n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
+newGen :: [Int] -> StdGen -> Network
+newGen n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
  where
   rest = init n
   hiddenIcsNcs = zip ((+ 1) <$> rest) (tail rest)
@@ -123,12 +128,6 @@ new n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
   wss = hidden ++ [outputWss]
   pack ic nc ws = (take nc $ chunksOf ic ws, drop (ic * nc) ws)
 
--- | Create a new standard network for a number of layer and neurons
---
--- @since 0.1.0
-newIO :: [Int] -> IO Network
-newIO n = new n <$> getStdGen
-
 -- | Do the complete back propagation
 --
 -- @since 0.1.0
@@ -199,7 +198,6 @@ updateNeuron (n, fpi) d = (n { inputWeights = ws' }, e)
   e   = activate' n (sumInputWeight fpi) * d
   ws' = zipWith (\x w -> w + (rate * e * x)) (inputs fpi) (inputWeights n)
 
-
 -- | Trains a network with a set of vector pairs until the global error is
 -- smaller than epsilon
 --
diff --git a/test/Spec.hs b/test/Spec.hs
index e87ae85..c76f75d 100644
--- a/test/Spec.hs
+++ b/test/Spec.hs
@@ -6,7 +6,7 @@ module Main
   ( main
   ) where
 
-import AI.Nn (newIO
+import AI.Nn (new
              ,predict
              ,train)
 import Test.Tasty (TestTree
@@ -37,7 +37,7 @@ unitTests = do
 nnSpec :: Spec
 nnSpec = parallel $ do
   it "should succeed to train logical AND" $ do
-    n <- newIO [2, 2, 1]
+    n <- new [2, 2, 1]
     let nw = train
           0.001
           n
@@ -48,7 +48,7 @@
     round (head $ predict nw [0, 0]) `shouldBe` (0 :: Int)
 
   it "should succeed to train logical OR" $ do
-    n <- newIO [2, 2, 1]
+    n <- new [2, 2, 1]
     let nw = train
           0.001
           n
@@ -59,7 +59,7 @@
     round (head $ predict nw [0, 0]) `shouldBe` (0 :: Int)
 
   it "should succeed to train addition" $ do
-    n <- newIO [2, 2, 1]
+    n <- new [2, 2, 1]
     let nw = train
           0.001
           n
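
For reference, a minimal end-to-end sketch of the renamed API, mirroring the README hunk and the test suite above. The argument order of `train` (epsilon, network, training pairs) follows the `train 0.001 n` call in `test/Spec.hs`; the exact AND pair list is an assumption reconstructed from the test expectations, since it falls outside the visible hunk context.

```haskell
module Main
  ( main
  ) where

import AI.Nn (new, predict, train)

main :: IO ()
main = do
  -- 'new' replaces the old 'newIO' and builds the network in IO,
  -- seeding the weights from the global StdGen
  network <- new [2, 2, 1]
  -- Train logical AND until the global error is below 0.001;
  -- the pair list is assumed from the test expectations above
  let nw = train
        0.001
        network
        [([0, 0], [0]), ([0, 1], [0]), ([1, 0], [0]), ([1, 1], [1])]
  -- Should print a value close to 1.0
  print (head $ predict nw [1, 1])
```

Note that `newGen`, the pure `StdGen`-taking constructor, stays internal: it replaces the old `new :: [Int] -> StdGen -> Network` but is not added to the export list, so callers migrate from `newIO` to `new` with no other changes.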