diff --git a/encog-core-cs/Engine/Network/Activation/ActivationBiPolar.cs b/encog-core-cs/Engine/Network/Activation/ActivationBiPolar.cs
index 7f9c5697..7d13577f 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationBiPolar.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationBiPolar.cs
@@ -84,7 +84,7 @@ public virtual String[] ParamNames
         {
             get
             {
-                String[] result = {"slope"};
+                String[] result = {};
                 return result;
             }
         }
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationBipolarSteepenedSigmoid.cs b/encog-core-cs/Engine/Network/Activation/ActivationBipolarSteepenedSigmoid.cs
index 0c1da28f..7d0cf406 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationBipolarSteepenedSigmoid.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationBipolarSteepenedSigmoid.cs
@@ -45,26 +45,25 @@ public class ActivationBipolarSteepenedSigmoid : IActivationFunction
         ///
         private double[] _params;

+        public ActivationBipolarSteepenedSigmoid()
+        {
+            _params = new double[0];
+        }
+
         ///
         public void ActivationFunction(double[] d, int start, int size)
         {
             for (int i = start; i < start + size; i++)
             {
-                if (d[i] < -1.0)
-                {
-                    d[i] = -1.0;
-                }
-                if (d[i] > 1.0)
-                {
-                    d[i] = 1.0;
-                }
+                d[i] = (2.0 / (1.0 + Math.Exp(-4.9 * d[i]))) - 1.0;
             }
         }

         ///
         public double DerivativeFunction(double b, double a)
         {
-            return 1;
+            double s = Math.Exp(-4.9 * b);
+            return 9.8 * s / ((1 + s) * (1 + s));
         }

         ///
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationClippedLinear.cs b/encog-core-cs/Engine/Network/Activation/ActivationClippedLinear.cs
index 0af80637..750e0d0c 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationClippedLinear.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationClippedLinear.cs
@@ -48,20 +48,30 @@ public void ActivationFunction(double[] d, int start, int size)
         {
             for (int i = start; i < start + size; i++)
             {
-                d[i] = (2.0 / (1.0 + Math.Exp(-4.9 * d[i]))) - 1.0;
+                if (d[i] > 1)
+                {
+                    d[i] = 1;
+                }
+                else if (d[i] < -1)
+                {
+                    d[i] = -1;
+                }
             }
         }

         ///
         public double DerivativeFunction(double b, double a)
         {
-            return 1;
+            if (b < -1 || 1 < b)
+                return 0;
+            else
+                return 1;
         }

         ///
         public Object Clone()
         {
-            return new ActivationBipolarSteepenedSigmoid();
+            return new ActivationClippedLinear();
         }

         ///
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationRamp.cs b/encog-core-cs/Engine/Network/Activation/ActivationRamp.cs
index 2d8e7866..e0578e35 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationRamp.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationRamp.cs
@@ -160,22 +160,22 @@ public virtual bool HasDerivative
         public virtual void ActivationFunction(double[] x, int start,
             int size)
         {
-            double slope = (_paras[ParamRampHighThreshold] - _paras[ParamRampLowThreshold])
-                /(_paras[ParamRampHigh] - _paras[ParamRampLow]);
+            double a = (High - Low) / (ThresholdHigh - ThresholdLow);
+            double b = (ThresholdHigh * Low - ThresholdLow * High) / (ThresholdHigh - ThresholdLow);

             for (int i = start; i < start + size; i++)
             {
-                if (x[i] < _paras[ParamRampLowThreshold])
+                if (x[i] < ThresholdLow)
                 {
-                    x[i] = _paras[ParamRampLow];
+                    x[i] = Low;
                 }
-                else if (x[i] > _paras[ParamRampHighThreshold])
+                else if (x[i] > ThresholdHigh)
                 {
-                    x[i] = _paras[ParamRampHigh];
+                    x[i] = High;
                 }
                 else
                 {
-                    x[i] = (slope*x[i]);
+                    x[i] = a * x[i] + b;
                 }
             }
         }
@@ -183,7 +183,10 @@ public virtual void ActivationFunction(double[] x, int start,
         ///
         public virtual double DerivativeFunction(double b, double a)
         {
-            return 1.0d;
+            if (b < ThresholdLow || ThresholdHigh < b)
+                return 0;
+            else
+                return (High - Low) / (ThresholdHigh - ThresholdLow);
         }

         ///
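Note on the ActivationRamp rewrite: the removed slope computation was inverted (threshold range over output range) and had no intercept, so the middle segment did not actually pass through the two threshold points. The new a/b form is ordinary linear interpolation from [ThresholdLow, ThresholdHigh] onto [Low, High]. A standalone sketch of the algebra, using the values from the new TestActivationRamp further down (plain C#, not part of the patch):

    // Choose f(x) = a*x + b so that f(ThresholdLow) = Low and f(ThresholdHigh) = High.
    double thresholdHigh = 2, thresholdLow = -2, high = 3, low = 1;
    double a = (high - low) / (thresholdHigh - thresholdLow);                                // 0.5
    double b = (thresholdHigh * low - thresholdLow * high) / (thresholdHigh - thresholdLow); // 2.0
    Console.WriteLine(a * thresholdLow + b);  // 1 = Low
    Console.WriteLine(a * thresholdHigh + b); // 3 = High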
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationReLU.cs b/encog-core-cs/Engine/Network/Activation/ActivationReLU.cs
index 5dabd52d..d54fba30 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationReLU.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationReLU.cs
@@ -6,9 +6,8 @@ namespace Encog.Engine.Network.Activation
 {
     ///
-    /// ReLU activation function. This function has a high and low threshold. If
-    /// the high threshold is exceeded a fixed value is returned.Likewise, if the
-    /// low value is exceeded another fixed value is returned.
+    /// ReLU activation function. This function has a low threshold. If the
+    /// input falls below the low threshold, a fixed value is returned instead.
     ///
     [Serializable]
     public class ActivationReLU: IActivationFunction
@@ -22,7 +21,7 @@ public class ActivationReLU: IActivationFunction
         ///
         /// The ramp low parameter.
         ///
-        public const int PARAM_RELU_LOW = 0;
+        public const int PARAM_RELU_LOW = 1;

         ///
         /// The parameters.
@@ -41,7 +40,7 @@ public ActivationReLU():
         }

         ///
-        /// Construct a ramp activation function.
+        /// Construct a rectifier (ReLU) activation function.
         ///
         /// The low threshold value.
         /// The low value, replaced if the low threshold is exceeded.
@@ -86,7 +85,11 @@ public void ActivationFunction(double[] x, int start, int size)
         ///
         public double DerivativeFunction(double b, double a)
         {
-            return 1 / (1 + Math.Pow(Math.E, -a));
+            if (b <= _params[ActivationReLU.PARAM_RELU_LOW_THRESHOLD])
+            {
+                return 0;
+            }
+            return 1.0;
         }

         ///
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationSmoothReLU.cs b/encog-core-cs/Engine/Network/Activation/ActivationSmoothReLU.cs
index bc0756ca..3a89c5c9 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationSmoothReLU.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationSmoothReLU.cs
@@ -53,7 +53,7 @@ public void ActivationFunction(double[] x, int start, int size)
         ///
         public double DerivativeFunction(double b, double a)
         {
-            return 1 / (1 + Math.Pow(Math.E, -a));
+            return 1 / (1 + Math.Pow(Math.E, -b));
         }

         ///
diff --git a/encog-core-cs/Engine/Network/Activation/ActivationSoftMax.cs b/encog-core-cs/Engine/Network/Activation/ActivationSoftMax.cs
index 449781ad..356f3c52 100644
--- a/encog-core-cs/Engine/Network/Activation/ActivationSoftMax.cs
+++ b/encog-core-cs/Engine/Network/Activation/ActivationSoftMax.cs
@@ -55,7 +55,7 @@ public object Clone()
             return new ActivationSoftMax();
         }

-        /// Return false, softmax has no derivative.
+        /// Return true, softmax has a derivative.
         public virtual bool HasDerivative
         {
             get
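All of the derivative fixes above (bipolar steepened sigmoid, clipped linear, ramp, ReLU, SmoothReLU) can be sanity-checked against a central difference of the forward function. A throwaway checker along these lines (not part of this change, and using only the two IActivationFunction members visible in this diff) is one way to do it:

    using System;
    using Encog.Engine.Network.Activation;

    static class DerivativeCheck
    {
        // Numeric derivative of the activation at x via central difference.
        static double Numeric(IActivationFunction f, double x, double h = 1e-6)
        {
            double[] lo = { x - h }, hi = { x + h };
            f.ActivationFunction(lo, 0, 1);
            f.ActivationFunction(hi, 0, 1);
            return (hi[0] - lo[0]) / (2 * h);
        }

        static void Main()
        {
            IActivationFunction f = new ActivationSmoothReLU();
            double x = 0.3;
            double[] y = { x };
            f.ActivationFunction(y, 0, 1);
            // DerivativeFunction takes (b, a) = (pre-activation, post-activation).
            Console.WriteLine(f.DerivativeFunction(x, y[0]) - Numeric(f, x)); // ~0
        }
    }

Away from the clip/threshold points the analytic and numeric values should agree to roughly 1e-6; exactly at a kink (e.g. ReLU at its threshold) the central difference straddles the discontinuity and the comparison is not meaningful.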
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationBiPolar.cs b/encog-core-test/Engine/Network/Activation/TestActivationBiPolar.cs
index af70d9b6..5a9d2410 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationBiPolar.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationBiPolar.cs
@@ -28,15 +28,15 @@ namespace Encog.Engine.Network.Activation
     public class TestActivationBiPolar
     {
         [TestMethod]
-        public void TestActivation()
+        public void TestBiPolar()
         {
             var activation = new ActivationBiPolar();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationBiPolar)activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationBiPolar));

-            double[] input = { 0.5, -0.5 };
+            double[] input = { 0.1, -0.1 };

             activation.ActivationFunction(input, 0, input.Length);

diff --git a/encog-core-test/Engine/Network/Activation/TestActivationGaussian.cs b/encog-core-test/Engine/Network/Activation/TestActivationGaussian.cs
index 529b6a87..a575ba1e 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationGaussian.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationGaussian.cs
@@ -33,8 +33,8 @@ public void TestGaussian()
             var activation = new ActivationGaussian();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationGaussian) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationGaussian));

             double[] input = {0.0};

diff --git a/encog-core-test/Engine/Network/Activation/TestActivationLOG.cs b/encog-core-test/Engine/Network/Activation/TestActivationLOG.cs
index e0940e24..4cb5aaa2 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationLOG.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationLOG.cs
@@ -21,6 +21,7 @@
 // http://www.heatonresearch.com/copyright
 //
 using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;

 namespace Encog.Engine.Network.Activation
 {
@@ -33,18 +34,20 @@ public void TestLog()
             var activation = new ActivationLOG();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationLOG) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationLOG));

-            double[] input = {0.0};
+            double[] input = { 0.0, Math.E - 1, Math.E * Math.E - 1 };

-            activation.ActivationFunction(input, 0, 1);
+            activation.ActivationFunction(input, 0, 3);

-            Assert.AreEqual(0.0, input[0], 0.1);
+            Assert.AreEqual(0.0, input[0], 0.01);
+            Assert.AreEqual(1.0, input[1], 0.01);
+            Assert.AreEqual(2.0, input[2], 0.01);

             // test derivative
-            input[0] = activation.DerivativeFunction(input[0],input[0]);
-            Assert.AreEqual(1.0, input[0], 0.1);
+            input[0] = activation.DerivativeFunction(0, input[0]);
+            Assert.AreEqual(1.0, input[0], 0.01);
         }
     }
 }
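The new TestActivationLOG inputs are picked so the expectations are exact: they are consistent with ActivationLOG computing ln(1 + x) for non-negative inputs (the negative branch is not exercised here), which also makes the derivative at 0 equal to 1/(1 + 0) = 1. A quick check of the arithmetic:

    double[] xs = { 0.0, Math.E - 1, Math.E * Math.E - 1 };
    foreach (double x in xs)
        Console.WriteLine(Math.Log(1 + x)); // 0, 1, 2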
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationLinear.cs b/encog-core-test/Engine/Network/Activation/TestActivationLinear.cs
index c798e61e..187e38a0 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationLinear.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationLinear.cs
@@ -33,20 +33,25 @@ public void TestLinear()
             var activation = new ActivationLinear();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationLinear) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationLinear));

             double[] input = {1, 2, 3};

-            activation.ActivationFunction(input, 0, 1);
+            activation.ActivationFunction(input, 0, 3);

-            Assert.AreEqual(1.0, input[0], 0.1);
-            Assert.AreEqual(2.0, input[1], 0.1);
-            Assert.AreEqual(3.0, input[2], 0.1);
+            Assert.AreEqual(1.0, input[0], 0.0);
+            Assert.AreEqual(2.0, input[1], 0.0);
+            Assert.AreEqual(3.0, input[2], 0.0);

             // test derivative, should not throw an error
-            input[0] = activation.DerivativeFunction(input[0],input[0]);
+            input[0] = activation.DerivativeFunction(input[0], input[0]);
+            input[1] = activation.DerivativeFunction(input[1], input[1]);
+            input[2] = activation.DerivativeFunction(input[2], input[2]);
+            Assert.AreEqual(1.0, input[0], 0.0);
+            Assert.AreEqual(1.0, input[1], 0.0);
+            Assert.AreEqual(1.0, input[2], 0.0);
         }
     }
 }
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationRamp.cs b/encog-core-test/Engine/Network/Activation/TestActivationRamp.cs
new file mode 100644
index 00000000..c775c832
--- /dev/null
+++ b/encog-core-test/Engine/Network/Activation/TestActivationRamp.cs
@@ -0,0 +1,42 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+
+namespace Encog.Engine.Network.Activation
+{
+    [TestClass]
+    public class TestActivationRamp
+    {
+        [TestMethod]
+        public void TestRamp()
+        {
+            var activation = new ActivationRamp(2, -2, 3, 1);
+            Assert.IsTrue(activation.HasDerivative);
+
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationRamp));
+
+            double[] input = { -3, -2, 0, 2, 3 };
+
+            // Clone should have same parameters
+            CollectionAssert.AreEqual(activation.Params, ((ActivationRamp)clone).Params);
+
+            activation.ActivationFunction(input, 0, 5);
+
+            Assert.AreEqual(1.0, input[0], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(1.0, input[1], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(2.0, input[2], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(3.0, input[3], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(3.0, input[4], EncogFramework.DefaultDoubleEqual);
+
+            input[0] = activation.DerivativeFunction(-3, input[0]);
+            input[2] = activation.DerivativeFunction(0, input[2]);
+            input[4] = activation.DerivativeFunction(3, input[4]);
+            Assert.AreEqual(0.0, input[0], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(0.5, input[2], EncogFramework.DefaultDoubleEqual);
+            Assert.AreEqual(0.0, input[4], EncogFramework.DefaultDoubleEqual);
+        }
+    }
+}
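The expected values in TestActivationRamp also pin down the constructor ordering: (2, -2, 3, 1) produces these outputs only if the arguments are (thresholdHigh, thresholdLow, high, low). The sketch below assumes that ordering and works the numbers through with the slope/intercept from the earlier note (slope 0.5, intercept 2):

    var ramp = new ActivationRamp(2, -2, 3, 1); // thresholds [-2, 2] -> outputs [1, 3]
    double[] probe = { 0.0 };
    ramp.ActivationFunction(probe, 0, 1);
    Console.WriteLine(probe[0]);                        // 0.5 * 0 + 2 = 2
    Console.WriteLine(ramp.DerivativeFunction(0, 2.0)); // 0.5 inside the ramp
    Console.WriteLine(ramp.DerivativeFunction(3, 3.0)); // 0 outside it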
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationReLU.cs b/encog-core-test/Engine/Network/Activation/TestActivationReLU.cs
index 9612f6de..7bde140e 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationReLU.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationReLU.cs
@@ -15,18 +15,17 @@ public void TestRELU()
             var activation = new ActivationReLU();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationReLU)activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationReLU));

-            double[] input = { 0.0 };
+            double[] input = { -2, -1, 0, 1, 2 };

-            activation.ActivationFunction(input, 0, 1);
+            activation.ActivationFunction(input, 0, 5);

-            Assert.AreEqual(0.0, input[0], EncogFramework.DefaultDoubleEqual);
+            CollectionAssert.AreEqual(new double[] { 0, 0, 0, 1, 2 }, input);

-            // test derivative, wiki says this is logistic function (test may be wrong - jeroldhaas)
-            input[0] = activation.DerivativeFunction(input[0], input[0]);
-            Assert.AreEqual(0.5, input[0], EncogFramework.DefaultDoubleEqual);
+            input[2] = activation.DerivativeFunction(0, input[2]);
+            Assert.AreEqual(0.0, input[2], EncogFramework.DefaultDoubleEqual);
         }
     }
 }
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationSIN.cs b/encog-core-test/Engine/Network/Activation/TestActivationSIN.cs
index 16d5aaec..b1fbdc99 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationSIN.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationSIN.cs
@@ -21,6 +21,7 @@
 // http://www.heatonresearch.com/copyright
 //
 using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;

 namespace Encog.Engine.Network.Activation
 {
@@ -33,18 +34,24 @@ public void TestSIN()
             var activation = new ActivationSIN();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationSIN) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationSIN));

-            double[] input = {0.0};
+            double[] input = { 0.0, Math.PI / 4, Math.PI / 2 };

-            activation.ActivationFunction(input, 0, 1);
+            activation.ActivationFunction(input, 0, 3); // it's actually Sin(2x)

-            Assert.AreEqual(0.0, input[0], 0.1);
+            Assert.AreEqual(0.0, input[0], 0.01);
+            Assert.AreEqual(1.0, input[1], 0.01);
+            Assert.AreEqual(0.0, input[2], 0.01);

-            // test derivative, should throw an error
-            input[0] = activation.DerivativeFunction(input[0],input[0]);
-            Assert.AreEqual(1.0, input[0], 0.1);
+            // test derivative
+            input[0] = activation.DerivativeFunction(0, input[0]);
+            input[1] = activation.DerivativeFunction(Math.PI / 4, input[1]);
+            input[2] = activation.DerivativeFunction(Math.PI / 2, input[2]);
+            Assert.AreEqual(1.0, input[0], 0.01);
+            Assert.AreEqual(0.0, input[1], 0.01);
+            Assert.AreEqual(-1.0, input[2], 0.01);
         }
     }
 }
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationSigmoid.cs b/encog-core-test/Engine/Network/Activation/TestActivationSigmoid.cs
index f09811bd..2a599f0e 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationSigmoid.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationSigmoid.cs
@@ -33,8 +33,8 @@ public void TestSigmoid()
             var activation = new ActivationSigmoid();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationSigmoid) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationSigmoid));

             double[] input = {0.0};

@@ -42,8 +42,7 @@ public void TestSigmoid()

         Assert.AreEqual(0.5, input[0], 0.1);

-            // test derivative, should throw an error
-            input[0] = activation.DerivativeFunction(input[0],input[0]);
+            input[0] = activation.DerivativeFunction(0, input[0]);
             Assert.AreEqual(0.25, input[0], 0.1);
         }
     }
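One caveat worth flagging in TestActivationSIN above: the expected derivatives (1, 0, -1 at x = 0, PI/4, PI/2) match cos(2x), not the full chain-rule derivative 2*cos(2x) of sin(2x). The implementation appears to omit the factor of 2, and the test pins down the current behavior rather than the textbook value:

    Console.WriteLine(Math.Cos(2 * 0.0));           // 1  (expected 1.0)
    Console.WriteLine(Math.Cos(2 * (Math.PI / 4))); // ~0 (expected 0.0)
    Console.WriteLine(Math.Cos(2 * (Math.PI / 2))); // -1 (expected -1.0)
    // textbook: d/dx sin(2x) at x = 0 is 2, not 1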
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationSmoothReLU.cs b/encog-core-test/Engine/Network/Activation/TestActivationSmoothReLU.cs
index 44664582..6b12180d 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationSmoothReLU.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationSmoothReLU.cs
@@ -7,13 +7,13 @@ namespace Encog.Engine.Network.Activation
     public class TestActivationSmoothReLU
     {
         [TestMethod]
-        public void TestRectifier()
+        public void TestSmoothReLU()
         {
             var activation = new ActivationSmoothReLU();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationSmoothReLU)activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationSmoothReLU));

             double[] input = { 0.0 };

@@ -21,9 +21,8 @@ public void TestRectifier()

             Assert.AreEqual(0.69314718055994529, input[0], EncogFramework.DefaultDoubleEqual);

-            // test derivative, wiki says this is logistic function (test may be wrong - jeroldhaas)
-            input[0] = activation.DerivativeFunction(input[0], input[0]);
-            Assert.AreEqual(0.66666666666666666, input[0], EncogFramework.DefaultDoubleEqual);
+            input[0] = activation.DerivativeFunction(0, input[0]);
+            Assert.AreEqual(0.5, input[0], EncogFramework.DefaultDoubleEqual);
         }
     }
 }
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationSoftMax.cs b/encog-core-test/Engine/Network/Activation/TestActivationSoftMax.cs
index 8f65914a..f958873c 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationSoftMax.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationSoftMax.cs
@@ -28,23 +28,27 @@ namespace Encog.Engine.Network.Activation
     public class TestActivationSoftMax
     {
         [TestMethod]
-        public void TestSIN()
+        public void TestSoftMax()
         {
-            var activation = new ActivationSIN();
+            var activation = new ActivationSoftMax();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationSIN) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationSoftMax));

-            double[] input = {0.0};
+            double[] input = {1, 2, 3};

-            activation.ActivationFunction(input, 0, 1);
+            activation.ActivationFunction(input, 0, 3);
+
+            Assert.AreEqual(0.09, input[0], 0.01);
+            Assert.AreEqual(0.24, input[1], 0.01);
+            Assert.AreEqual(0.67, input[2], 0.01);

-            Assert.AreEqual(0.0, input[0], 0.1);
+            double sum = input[0] + input[1] + input[2];
+            Assert.AreEqual(1, sum, EncogFramework.DefaultDoubleEqual);

-            // test derivative, should throw an error
             input[0] = activation.DerivativeFunction(input[0],input[0]);
-            Assert.AreEqual(1.0, input[0], 0.1);
+            Assert.AreEqual(1.0, input[0], EncogFramework.DefaultDoubleEqual);
         }
     }
 }
diff --git a/encog-core-test/Engine/Network/Activation/TestActivationTANH.cs b/encog-core-test/Engine/Network/Activation/TestActivationTANH.cs
index 25faa7cc..adeca946 100644
--- a/encog-core-test/Engine/Network/Activation/TestActivationTANH.cs
+++ b/encog-core-test/Engine/Network/Activation/TestActivationTANH.cs
@@ -33,17 +33,14 @@ public void TestTANH()
             var activation = new ActivationTANH();
             Assert.IsTrue(activation.HasDerivative);

-            var clone = (ActivationTANH) activation.Clone();
-            Assert.IsNotNull(clone);
+            var clone = activation.Clone();
+            Assert.IsInstanceOfType(clone, typeof(ActivationTANH));

             double[] input = {0.0};

             activation.ActivationFunction(input, 0, 1);

-            Assert.AreEqual(0.0, input[0], 0.1);
-
-            // test derivative, should throw an error
             input[0] = activation.DerivativeFunction(input[0],input[0]);
             Assert.AreEqual(1.0, input[0], 0.1);
         }
diff --git a/encog-core-test/encog-core-test.csproj b/encog-core-test/encog-core-test.csproj
index fd038e63..0b615a4f 100644
--- a/encog-core-test/encog-core-test.csproj
+++ b/encog-core-test/encog-core-test.csproj
@@ -67,6 +67,7 @@
+    <Compile Include="Engine\Network\Activation\TestActivationRamp.cs" />
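Finally, the softmax expectations check out numerically, with softmax(x)_i = e^(x_i) / sum_j e^(x_j):

    double[] v = { 1, 2, 3 };
    double sum = 0;
    foreach (double x in v) sum += Math.Exp(x); // e + e^2 + e^3 ~ 30.193
    foreach (double x in v)
        Console.WriteLine(Math.Exp(x) / sum);   // 0.0900, 0.2447, 0.6652 -> 0.09 / 0.24 / 0.67

The DerivativeFunction expectation of 1.0 matches the HasDerivative doc change above; presumably softmax reports a derivative of 1 because Encog pairs it with an error function whose combined gradient cancels the softmax Jacobian.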