
Adding some activation layers.

pull/756/head
Niklas Gustafsson, 4 years ago
commit 19f380523e
7 changed files with 109 additions and 2 deletions
  1. src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs  (+1, -1)
  2. src/TensorFlowNET.Keras/Layers/Activation/ReLu .cs  (+24, -0)
  3. src/TensorFlowNET.Keras/Layers/Activation/Sigmoid.cs  (+24, -0)
  4. src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs  (+24, -0)
  5. src/TensorFlowNET.Keras/Layers/LayersApi.cs  (+8, -0)
  6. test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs  (+25, -0)
  7. test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs  (+3, -1)

src/TensorFlowNET.Keras/Layers/Activation/LeakyReLu.cs  (+1, -1)

@@ -21,7 +21,7 @@ namespace Tensorflow.Keras.Layers


        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
-           return tf.nn.relu(inputs);
+           return tf.nn.leaky_relu(inputs, alpha: alpha);
        }
    }
}
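
The one-line change above is a bug fix: LeakyReLu's Call previously fell through to the plain ReLU op, so the layer's configured alpha was ignored and negative inputs were clamped to zero. A minimal sketch of the behavioral difference, using only the tf.nn bindings that appear in this diff (the entry point and eager-mode setup below are assumptions, not part of the commit):

using System;
using static Tensorflow.Binding;

static class LeakyReluFixSketch
{
    static void Main()
    {
        tf.enable_eager_execution();   // assumed setup so results can be read back immediately

        var x = tf.constant(new[] { -3.0f, -1.0f, 0.0f, 2.0f });

        // Old path: plain ReLU clamps every negative input to zero, regardless of alpha.
        var before = tf.nn.relu(x);                     // 0, 0, 0, 2
        // New path: leaky ReLU scales negative inputs by the configured slope instead.
        var after = tf.nn.leaky_relu(x, alpha: 0.5f);   // -1.5, -0.5, 0, 2

        Console.WriteLine(string.Join(", ", before.ToArray<float>()));
        Console.WriteLine(string.Join(", ", after.ToArray<float>()));
    }
}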

src/TensorFlowNET.Keras/Layers/Activation/ReLu .cs  (+24, -0)

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
    /// <summary>
    /// Rectified Linear Unit activation layer.
    /// </summary>
    public class ReLu : Layer
    {
        public ReLu(LayerArgs args) : base(args)
        {
        }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
            return tf.nn.relu(inputs, name: Name);
        }
    }
}

src/TensorFlowNET.Keras/Layers/Activation/Sigmoid.cs  (+24, -0)

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
    /// <summary>
    /// Sigmoid activation layer.
    /// </summary>
    public class Sigmoid : Layer
    {
        public Sigmoid(LayerArgs args) : base(args)
        {
        }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
            return tf.nn.sigmoid(inputs, name: Name);
        }
    }
}

src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs  (+24, -0)

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
    /// <summary>
    /// Hyperbolic tangent activation layer.
    /// </summary>
    public class Tanh : Layer
    {
        public Tanh(LayerArgs args) : base(args)
        {
        }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
            return tf.nn.tanh(inputs, name: Name);
        }
    }
}

src/TensorFlowNET.Keras/Layers/LayersApi.cs  (+8, -0)

@@ -315,6 +315,12 @@ namespace Tensorflow.Keras.Layers
            return layer.Apply(inputs);
        }

+       public Layer ReLU() => new ReLu(new LayerArgs { });
+
+       public Layer Tanh() => new Tanh(new LayerArgs { });
+
+       public Layer Sigmoid() => new Sigmoid(new LayerArgs { });
+
        /// <summary>
        /// Leaky version of a Rectified Linear Unit.
        /// </summary>
@@ -392,6 +398,8 @@ namespace Tensorflow.Keras.Layers
            => name switch
            {
                "glorot_uniform" => tf.glorot_uniform_initializer,
+               "random_uniform" => tf.random_uniform_initializer,
+               "orthogonal" => tf.orthogonal_initializer,
                "zeros" => tf.zeros_initializer,
                "ones" => tf.ones_initializer,
                _ => tf.glorot_uniform_initializer
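
With these three one-liners, LayersApi exposes parameterless factories for the new layers, and GetInitializerByName additionally resolves the strings "random_uniform" and "orthogonal" (unknown names still fall back to glorot_uniform). A minimal sketch of the resulting call surface, mirroring the unit tests below (the using directives are assumptions about how the keras and tf entry points are brought into scope):

using System;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;   // assumed to expose the `keras` entry point used in the tests

static class ActivationLayersSketch
{
    static void Main()
    {
        var x = tf.constant(new[] { -3.0f, -1.0f, 0.0f, 2.0f });

        // Each factory returns a Layer whose Apply delegates to the matching tf.nn op.
        Tensor relu    = keras.layers.ReLU().Apply(x);      // 0, 0, 0, 2
        Tensor sigmoid = keras.layers.Sigmoid().Apply(x);   // ~0.047, 0.269, 0.5, 0.881
        Tensor tanh    = keras.layers.Tanh().Apply(x);      // ~-0.995, -0.762, 0, 0.964

        Console.WriteLine(string.Join(", ", tanh.ToArray<float>()));
    }
}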


test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs  (+25, -0)

@@ -11,6 +11,31 @@ namespace TensorFlowNET.Keras.UnitTest
    [TestClass]
    public class ActivationTest : EagerModeTestBase
    {
+       [TestMethod]
+       public void ReLU()
+       {
+           var layer = keras.layers.ReLU();
+           Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+           Equal(new[] { 0.0f, 0.0f, 0.0f, 2.0f }, output.ToArray<float>());
+       }
+
+       [TestMethod]
+       public void Sigmoid()
+       {
+           var layer = keras.layers.Sigmoid();
+           Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+           Equal(new[] { 0.047425866f, 0.26894143f, 0.5f, 0.8807971f }, output.ToArray<float>());
+       }
+
+       [TestMethod]
+       public void Tanh()
+       {
+           var layer = keras.layers.Tanh();
+           Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+           // {-0.9950547f, -0.7615942f, 0f, 0.9640276f}
+           Equal(new[] { -0.9950547f, -0.7615942f, 0f, 0.9640276f }, output.ToArray<float>());
+       }
+
        [TestMethod]
        public void LeakyReLU()
        {
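
The existing LeakyReLU test (truncated above) predates the one-line fix in LeakyReLu.cs, so nothing here pins the alpha behavior. A regression test in the same style could look like the following sketch (not part of this commit; it assumes keras.layers.LeakyReLU accepts an alpha argument, which the layer's alpha field suggests but this diff does not show):

[TestMethod]
public void LeakyReLU_RespectsAlpha()
{
    // With the old tf.nn.relu call this would have returned { 0, 0, 0, 2 }.
    var layer = keras.layers.LeakyReLU(alpha: 0.5f);
    Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
    Equal(new[] { -1.5f, -0.5f, 0.0f, 2.0f }, output.ToArray<float>());
}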


test/TensorFlowNET.Keras.UnitTest/Layers/LayersTest.cs  (+3, -1)

@@ -123,7 +123,9 @@ namespace TensorFlowNET.Keras.UnitTest
                { 2, 3, 4, 5 },
                { 3, 4, 5, 6 }
            });
-           // model.compile("rmsprop", "mse");
+           model.compile(optimizer: keras.optimizers.RMSprop(0.001f),
+               loss: keras.losses.MeanSquaredError(),
+               metrics: new[] { "acc" });
            var output_array = model.predict(input_array);
            Assert.AreEqual((32, 10, 64), output_array.shape);
        }

