diff --git a/src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs b/src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs
index 625e81d4..662cd908 100644
--- a/src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs
+++ b/src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs
@@ -21,7 +21,7 @@ namespace Tensorflow.Keras.Layers
protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
{
- return tf.nn.leaky_relu(inputs, alpha: alpha);
+ return tf.nn.leaky_relu(inputs, alpha: alpha, name: Name);
}
}
}
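For context, LeakyReLU is defined as f(x) = x for x >= 0 and f(x) = alpha * x for x < 0, which is why the layer has to keep forwarding its `alpha` field rather than collapsing to plain ReLU. A minimal stand-alone sketch of the math (plain C#, independent of the TF.NET binding; the `LeakyRelu` helper name is ours, not part of the library):

```csharp
using System;
using System.Linq;

static class LeakyReluSketch
{
    // Identity for non-negative inputs, alpha-scaled otherwise.
    static float LeakyRelu(float x, float alpha = 0.3f)
        => x >= 0f ? x : alpha * x;

    static void Main()
    {
        var inputs = new[] { -3.0f, -1.0f, 0.0f, 2.0f };
        // alpha = 0.3 (the Keras layer default) gives: -0.9, -0.3, 0, 2
        Console.WriteLine(string.Join(", ", inputs.Select(x => LeakyRelu(x))));
    }
}
```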
diff --git a/src/TensorFlowNET.Keras/Layers/Activation/ReLu.cs b/src/TensorFlowNET.Keras/Layers/Activation/ReLu.cs
new file mode 100644
index 00000000..3bd07e76
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Layers/Activation/ReLu.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+ /// <summary>
+ /// Rectified Linear Unit activation function.
+ /// </summary>
+ public class ReLu : Layer
+ {
+ public ReLu(LayerArgs args) : base(args)
+ {
+ }
+
+ protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+ {
+ return tf.nn.relu(inputs, name: Name);
+ }
+ }
+}
diff --git a/src/TensorFlowNET.Keras/Layers/Activation/Sigmoid.cs b/src/TensorFlowNET.Keras/Layers/Activation/Sigmoid.cs
new file mode 100644
index 00000000..49ad8023
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Layers/Activation/Sigmoid.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+ /// <summary>
+ /// Sigmoid activation function: f(x) = 1 / (1 + exp(-x)).
+ /// </summary>
+ public class Sigmoid : Layer
+ {
+ public Sigmoid(LayerArgs args) : base(args)
+ {
+ }
+
+ protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+ {
+ return tf.nn.sigmoid(inputs, name: Name);
+ }
+ }
+}
diff --git a/src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs b/src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs
new file mode 100644
index 00000000..bdb3a5e2
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs
@@ -0,0 +1,24 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+ /// <summary>
+ /// Hyperbolic tangent activation function.
+ /// </summary>
+ public class Tanh : Layer
+ {
+ public Tanh(LayerArgs args) : base(args)
+ {
+ }
+
+ protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+ {
+ return tf.nn.tanh(inputs, name: Name);
+ }
+ }
+}
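All three new layers share one shape: a stateless `Call` that delegates to the matching `tf.nn` op and passes the layer's `Name` through. A hedged usage sketch in eager mode (assumes the same usings as the unit tests below; the `keras.layers.Tanh()` factory added in the next hunk is just sugar for this construction):

```csharp
var x = np.array(-3.0f, -1.0f, 0.0f, 2.0f);

var tanh = new Tanh(new LayerArgs { });
Tensor y = tanh.Apply(x);   // elementwise tanh, same shape as x
```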
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index 03125e03..fbce83c9 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -315,6 +315,12 @@ namespace Tensorflow.Keras.Layers
return layer.Apply(inputs);
}
+ public Layer ReLU() => new ReLu(new LayerArgs { });
+
+ public Layer Tanh() => new Tanh(new LayerArgs { });
+
+ public Layer Sigmoid() => new Sigmoid(new LayerArgs { });
+
/// <summary>
/// Leaky version of a Rectified Linear Unit.
/// </summary>
@@ -392,6 +398,8 @@ namespace Tensorflow.Keras.Layers
=> name switch
{
"glorot_uniform" => tf.glorot_uniform_initializer,
+ "random_uniform" => tf.random_uniform_initializer,
+ "orthogonal" => tf.orthogonal_initializer,
"zeros" => tf.zeros_initializer,
"ones" => tf.ones_initializer,
_ => tf.glorot_uniform_initializer
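With the factories in place the activations can be stacked like any other layer, and the two new switch arms mean the strings "random_uniform" and "orthogonal" now resolve to real initializers instead of silently falling back to glorot_uniform. A hedged sketch (assumes `keras.Sequential()` and `model.add` behave as used elsewhere in this repo):

```csharp
var model = keras.Sequential();
model.add(keras.layers.Dense(64));
model.add(keras.layers.ReLU());      // standalone activation, no arguments
model.add(keras.layers.Dense(10));
model.add(keras.layers.Sigmoid());
```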
diff --git a/test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs b/test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs
index 62e8ae28..86574eeb 100644
--- a/test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs
+++ b/test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs
@@ -11,6 +11,30 @@ namespace TensorFlowNET.Keras.UnitTest
[TestClass]
public class ActivationTest : EagerModeTestBase
{
+ [TestMethod]
+ public void ReLU()
+ {
+ var layer = keras.layers.ReLU();
+ Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+ Equal(new[] { 0.0f, 0.0f, 0.0f, 2.0f }, output.ToArray());
+ }
+
+ [TestMethod]
+ public void Sigmoid()
+ {
+ var layer = keras.layers.Sigmoid();
+ Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+ Equal(new[] { 0.047425866f, 0.26894143f, 0.5f, 0.8807971f }, output.ToArray());
+ }
+
+ [TestMethod]
+ public void Tanh()
+ {
+ var layer = keras.layers.Tanh();
+ Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+ Equal(new[] { -0.9950547f, -0.7615942f, 0f, 0.9640276f }, output.ToArray());
+ }
+
[TestMethod]
public void LeakyReLU()
{
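The hard-coded expectations in the sigmoid and tanh tests can be re-derived from System.Math, e.g. sigmoid(-3) = 1 / (1 + e^3) ≈ 0.047426. A quick stand-alone check (plain C#):

```csharp
using System;

static class ExpectedValues
{
    static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

    static void Main()
    {
        foreach (var x in new[] { -3.0, -1.0, 0.0, 2.0 })
            Console.WriteLine($"sigmoid({x}) = {Sigmoid(x):F8}, tanh({x}) = {Math.Tanh(x):F8}");
        // sigmoid: 0.04742587, 0.26894142, 0.50000000, 0.88079708
        // tanh:   -0.99505475, -0.76159416, 0.00000000, 0.96402758
    }
}
```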
diff --git a/test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs b/test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs
index f7e6155c..baca83c3 100644
--- a/test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs
+++ b/test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs
@@ -123,7 +123,9 @@ namespace TensorFlowNET.Keras.UnitTest
{ 2, 3, 4, 5 },
{ 3, 4, 5, 6 }
});
- // model.compile("rmsprop", "mse");
+ model.compile(optimizer: keras.optimizers.RMSprop(0.001f),
+ loss: keras.losses.MeanSquaredError(),
+ metrics: new[] { "acc" });
var output_array = model.predict(input_array);
Assert.AreEqual((32, 10, 64), output_array.shape);
}
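The (32, 10, 64) assertion follows from the Embedding contract: integer input of shape (batch, sequence) maps to (batch, sequence, output_dim). A hedged sketch of that contract in isolation (assumes `keras.layers.Embedding(input_dim, output_dim)` as used by this test class; the argument values are illustrative):

```csharp
var layer = keras.layers.Embedding(input_dim: 1000, output_dim: 64);
var ids = np.array(new[,] { { 1, 2, 3, 4 } });   // shape (1, 4)
var vectors = layer.Apply(ids);                  // shape (1, 4, 64)
```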