@@ -21,7 +21,7 @@ namespace Tensorflow.Keras.Layers
 
         protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
         {
-            return tf.nn.leaky_relu(inputs, alpha: alpha);
+            return tf.nn.relu(inputs);
         }
     }
 }
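For context, the two activations this hunk swaps differ only on negative inputs: LeakyReLU is f(x) = max(αx, x), while plain ReLU zeroes negatives entirely. A minimal scalar sketch of the difference (illustrative only, not the TensorFlow.NET API; Keras' usual default alpha of 0.3 is assumed):

```csharp
using System;

// Plain ReLU: negative inputs are clamped to zero.
static float Relu(float x) => Math.Max(0f, x);

// LeakyReLU: negative inputs keep a small slope (alpha) instead of being zeroed.
static float LeakyRelu(float x, float alpha = 0.3f) => x >= 0f ? x : alpha * x;

Console.WriteLine(Relu(-2f));       // 0
Console.WriteLine(LeakyRelu(-2f));  // -0.6
```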
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    /// <summary>
+    /// Rectified Linear Unit activation function.
+    /// </summary>
+    public class ReLu : Layer
+    {
+        public ReLu(LayerArgs args) : base(args)
+        {
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+        {
+            return tf.nn.relu(inputs, name: Name);
+        }
+    }
+}
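A minimal eager-mode usage sketch of the new layer (it mirrors the unit test added further down; `tf.constant` over a float array is assumed as the input):

```csharp
var layer = new ReLu(new LayerArgs { });
var output = layer.Apply(tf.constant(new[] { -3.0f, -1.0f, 0.0f, 2.0f }));
// expected values: [0, 0, 0, 2]
```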
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    /// <summary>
+    /// Sigmoid activation function.
+    /// </summary>
+    public class Sigmoid : Layer
+    {
+        public Sigmoid(LayerArgs args) : base(args)
+        {
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+        {
+            return tf.nn.sigmoid(inputs, name: Name);
+        }
+    }
+}
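The expected values in the Sigmoid unit test added below follow directly from σ(x) = 1 / (1 + e^(-x)); a quick stand-alone check:

```csharp
using System;

foreach (var x in new[] { -3.0, -1.0, 0.0, 2.0 })
    Console.WriteLine(1.0 / (1.0 + Math.Exp(-x)));
// 0.04742587..., 0.26894142..., 0.5, 0.88079707...
```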
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    /// <summary>
+    /// Hyperbolic tangent activation function.
+    /// </summary>
+    public class Tanh : Layer
+    {
+        public Tanh(LayerArgs args) : base(args)
+        {
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+        {
+            return tf.nn.tanh(inputs, name: Name);
+        }
+    }
+}
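Likewise for tanh; .NET's Math.Tanh reproduces the constants asserted in the test below:

```csharp
using System;

foreach (var x in new[] { -3.0, -1.0, 0.0, 2.0 })
    Console.WriteLine(Math.Tanh(x));
// -0.99505475..., -0.76159416..., 0, 0.96402758...
```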
@@ -315,6 +315,12 @@ namespace Tensorflow.Keras.Layers
             return layer.Apply(inputs);
         }
 
+        public Layer ReLU() => new ReLu(new LayerArgs { });
+
+        public Layer Tanh() => new Tanh(new LayerArgs { });
+
+        public Layer Sigmoid() => new Sigmoid(new LayerArgs { });
+
         /// <summary>
         /// Leaky version of a Rectified Linear Unit.
         /// </summary>
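With these factory methods in place, the layers become reachable from the public `keras.layers` surface; a short eager-mode sketch, mirroring the unit tests below:

```csharp
var x = tf.constant(new[] { -3.0f, -1.0f, 0.0f, 2.0f });
Tensor y = keras.layers.Sigmoid().Apply(x);
// y ≈ [0.047, 0.269, 0.5, 0.881]
```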
@@ -392,6 +398,8 @@ namespace Tensorflow.Keras.Layers
             => name switch
             {
                 "glorot_uniform" => tf.glorot_uniform_initializer,
+                "random_uniform" => tf.random_uniform_initializer,
+                "orthogonal" => tf.orthogonal_initializer,
                 "zeros" => tf.zeros_initializer,
                 "ones" => tf.ones_initializer,
                 _ => tf.glorot_uniform_initializer
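The switch arms map config strings to initializer singletons; before this change, unknown names fell through to the glorot_uniform default. The enclosing method's name is not visible in this hunk, so the call below is hypothetical:

```csharp
// Hypothetical call site; assumes the helper is named GetInitializerByName.
IInitializer init = GetInitializerByName("orthogonal"); // now resolves to tf.orthogonal_initializer
                                                        // instead of the glorot_uniform fallback
```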
@@ -11,6 +11,31 @@ namespace TensorFlowNET.Keras.UnitTest
     [TestClass]
     public class ActivationTest : EagerModeTestBase
     {
+        [TestMethod]
+        public void ReLU()
+        {
+            var layer = keras.layers.ReLU();
+            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+            Equal(new[] { 0.0f, 0.0f, 0.0f, 2.0f }, output.ToArray<float>());
+        }
+
+        [TestMethod]
+        public void Sigmoid()
+        {
+            var layer = keras.layers.Sigmoid();
+            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+            Equal(new[] { 0.047425866f, 0.26894143f, 0.5f, 0.8807971f }, output.ToArray<float>());
+        }
+
+        [TestMethod]
+        public void Tanh()
+        {
+            var layer = keras.layers.Tanh();
+            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+            // tanh(-3) ≈ -0.9950547, tanh(-1) ≈ -0.7615942, tanh(0) = 0, tanh(2) ≈ 0.9640276
+            Equal(new[] { -0.9950547f, -0.7615942f, 0f, 0.9640276f }, output.ToArray<float>());
+        }
+
     [TestMethod]
     public void LeakyReLU()
     {
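`Equal` comes from `EagerModeTestBase` and is not shown in this diff; presumably it does an element-wise float comparison within a tolerance, along these lines:

```csharp
// Assumed shape of the helper, for reference only.
protected void Equal(float[] expected, float[] actual, float eps = 1e-6f)
{
    Assert.AreEqual(expected.Length, actual.Length);
    for (int i = 0; i < expected.Length; i++)
        Assert.AreEqual(expected[i], actual[i], eps);
}
```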
@@ -123,7 +123,9 @@ namespace TensorFlowNET.Keras.UnitTest
                 { 2, 3, 4, 5 },
                 { 3, 4, 5, 6 }
             });
-            // model.compile("rmsprop", "mse");
+            model.compile(optimizer: keras.optimizers.RMSprop(0.001f),
+                loss: keras.losses.MeanSquaredError(),
+                metrics: new[] { "acc" });
             var output_array = model.predict(input_array);
             Assert.AreEqual((32, 10, 64), output_array.shape);
         }
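The compile call replaces the previously commented-out string shorthand with explicit objects, pinning the RMSprop learning rate (0.001f) and loss in the process; presumably the string-based overload is not supported in this binding, which is an assumption, not something stated in the diff:

```csharp
// Upstream Python Keras equivalent of the shorthand that was commented out:
//   model.compile("rmsprop", "mse")
// The explicit-object form used here makes the hyperparameters visible:
model.compile(optimizer: keras.optimizers.RMSprop(0.001f),   // learning rate = 0.001
              loss: keras.losses.MeanSquaredError(),
              metrics: new[] { "acc" });
```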