Skip to content

Commit

Permalink
Improve 'new' api
Browse files Browse the repository at this point in the history
  • Loading branch information
saschagrunert committed Apr 2, 2018
1 parent f714ebb commit 7e07663
Show file tree
Hide file tree
Showing 4 changed files with 16 additions and 17 deletions.
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,9 @@ A minimal usage example would look like this:
```haskell
main :: IO ()
main = do
{- Creates a new network with two inputs, two hidden layers and one output -}
network <- newIO [2, 2, 1]
{- Creates a new network with two inputs,
two hidden layers and one output -}
network <- new [2, 2, 1]

{- Train the network for a common logical AND,
until the maximum error of 0.01 is reached -}
Expand Down
2 changes: 1 addition & 1 deletion package.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
---
name: nn
version: 0.1.0
version: 0.2.0
github: "saschagrunert/nn"
license: MIT
author: "Sascha Grunert"
Expand Down
18 changes: 8 additions & 10 deletions src/AI/Nn.hs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@ module AI.Nn
( Network
, predict
, new
, newIO
, train
) where

Expand Down Expand Up @@ -97,6 +96,12 @@ sigmoidLayer :: [Neuron'] -> Layer ()
-- Prepend a bias neuron (sized to the neurons' input count) to a layer of
-- sigmoid-activated neurons built from the given weight lists.
-- NOTE(review): 'head ns' is partial — assumes ns is non-empty; confirm callers.
sigmoidLayer ns = (biasNeuron inputCount, ()) : createLayer ns sigmoidNeuron
  where inputCount = length (head ns)

-- | Create a new standard network for a number of layers and neurons,
-- seeding the initial weights from the global random generator.
--
-- @since 0.2.0
new :: [Int] -> IO Network
new n = newGen n <$> getStdGen

-- | Create a new output Layer from a list of Neuron'
--
-- @since 0.1.0
Expand All @@ -106,8 +111,8 @@ outputLayer n = createLayer n outputNeuron
-- | Create a new network for a StdGen and a number of layers and neurons
--
-- @since 0.1.0
new :: [Int] -> StdGen -> Network
new n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
newGen :: [Int] -> StdGen -> Network
newGen n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
where
rest = init n
hiddenIcsNcs = zip ((+ 1) <$> rest) (tail rest)
Expand All @@ -123,12 +128,6 @@ new n g = (sigmoidLayer <$> init wss) ++ [outputLayer (last wss)]
wss = hidden ++ [outputWss]
pack ic nc ws = (take nc $ chunksOf ic ws, drop (ic * nc) ws)

-- | Create a new standard network for a number of layers and neurons
--
-- @since 0.1.0
newIO :: [Int] -> IO Network
newIO n = new n <$> getStdGen

-- | Do the complete back propagation
--
-- @since 0.1.0
Expand Down Expand Up @@ -199,7 +198,6 @@ updateNeuron (n, fpi) d = (n { inputWeights = ws' }, e)
e = activate' n (sumInputWeight fpi) * d
ws' = zipWith (\x w -> w + (rate * e * x)) (inputs fpi) (inputWeights n)


-- | Trains a network with a set of vector pairs until the global error is
-- smaller than epsilon
--
Expand Down
8 changes: 4 additions & 4 deletions test/Spec.hs
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ module Main
( main
) where

import AI.Nn (newIO
import AI.Nn (new
,predict
,train)
import Test.Tasty (TestTree
Expand Down Expand Up @@ -37,7 +37,7 @@ unitTests = do
nnSpec :: Spec
nnSpec = parallel $ do
it "should succeed to train logical AND" $ do
n <- newIO [2, 2, 1]
n <- new [2, 2, 1]
let
nw = train 0.001
n
Expand All @@ -48,7 +48,7 @@ nnSpec = parallel $ do
round (head $ predict nw [0, 0]) `shouldBe` (0 :: Int)

it "should succeed to train logical OR" $ do
n <- newIO [2, 2, 1]
n <- new [2, 2, 1]
let
nw = train 0.001
n
Expand All @@ -59,7 +59,7 @@ nnSpec = parallel $ do
round (head $ predict nw [0, 0]) `shouldBe` (0 :: Int)

it "should succeed to train addition" $ do
n <- newIO [2, 2, 1]
n <- new [2, 2, 1]
let
nw = train 0.001
n
Expand Down

0 comments on commit 7e07663

Please sign in to comment.