
add tf.nn.elu activation function. #294

tags/v0.10
Oceania2018 6 years ago
parent
commit e1db889a5b
3 changed files with 25 additions and 1 deletion
  1. +3 -0   src/TensorFlowNET.Core/APIs/tf.nn.cs
  2. +21 -0  src/TensorFlowNET.Core/Operations/NnOps/gen_nn_ops.cs
  3. +1 -1   test/TensorFlowNET.Examples/ImageProcessing/RetrainImageClassifier.cs

+3 -0  src/TensorFlowNET.Core/APIs/tf.nn.cs

@@ -97,6 +97,9 @@ namespace Tensorflow
             throw new NotImplementedException("");
         }
 
+        public static Tensor elu(Tensor features, string name = null)
+            => gen_nn_ops.elu(features, name: name);
+
         public static (Tensor, Tensor) moments(Tensor x,
             int[] axes,
             string name = null,
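
For orientation, a minimal usage sketch of the new tf.nn.elu wrapper (not part of the commit; the input values, the Session/run pattern, and the console output are illustrative and assume the graph-and-session API used elsewhere in the repository's examples):

    using System;
    using Tensorflow;

    class EluDemo
    {
        static void Main()
        {
            // Negative inputs map to exp(x) - 1, non-negative inputs pass through unchanged.
            var features = tf.constant(new float[] { -2.0f, -0.5f, 0.0f, 1.5f });
            var activated = tf.nn.elu(features);

            using (var sess = tf.Session())
            {
                var result = sess.run(activated);   // roughly [-0.86, -0.39, 0.0, 1.5]
                Console.WriteLine(result);
            }
        }
    }

If tf.Session() is not disposable in this version, the with(tf.Session(), sess => ...) helper used in the examples project can be substituted for the using block.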


+21 -0  src/TensorFlowNET.Core/Operations/NnOps/gen_nn_ops.cs

@@ -139,6 +139,27 @@ namespace Tensorflow.Operations
             });
 
             return _op.outputs[0];
         }
+        /// <summary>
+        /// Computes exponential linear: <c>exp(features) - 1</c> if &lt; 0, <c>features</c> otherwise.
+        /// </summary>
+        /// <param name="features">
+        /// </param>
+        /// <param name="name">
+        /// If specified, the created operation in the graph will be this one, otherwise it will be named 'Elu'.
+        /// </param>
+        /// <returns>
+        /// The Operation can be fetched from the resulting Tensor, by fetching the Operation property from the result.
+        /// </returns>
+        /// <remarks>
+        /// See [Fast and Accurate Deep Network Learning by Exponential Linear Units (ELUs)
+        /// ](http://arxiv.org/abs/1511.07289)
+        /// </remarks>
+        public static Tensor elu(Tensor features, string name = "Elu")
+        {
+            var op = _op_def_lib._apply_op_helper("Elu", name: name, args: new { features });
+            return op.output;
+        }
+
 
         public static Tensor[] _fused_batch_norm(Tensor x,
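
To make the documented formula concrete, here is an illustrative scalar version in plain C# (not part of the commit; the real Elu op is applied element-wise to the Tensor by the native kernel, with the ELU scale fixed at alpha = 1):

    using System;

    static class EluMath
    {
        // ELU(x) = x            for x >= 0
        //        = exp(x) - 1   for x <  0
        public static double Elu(double x)
            => x >= 0.0 ? x : Math.Exp(x) - 1.0;

        // Example values: Elu(-1.0) ≈ -0.632, Elu(0.0) = 0.0, Elu(2.0) = 2.0
    }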


+1 -1  test/TensorFlowNET.Examples/ImageProcessing/RetrainImageClassifier.cs

@@ -199,7 +199,7 @@ namespace TensorFlowNET.Examples.ImageProcess
             RefVariable layer_biases = null;
             with(tf.name_scope("biases"), delegate
             {
-                layer_biases = tf.Variable(tf.zeros((class_count)), name: "final_biases");
+                layer_biases = tf.Variable(tf.zeros(class_count), name: "final_biases");
                 variable_summaries(layer_biases);
             });



