
add grad for LeakyRelu.

Oceania2018 committed 6 years ago
commit 71dd19d159 · tags/v0.12
3 changed files with 21 additions and 1 deletion:

  1. src/TensorFlowNET.Core/APIs/tf.queue.cs (+0, -1)
  2. src/TensorFlowNET.Core/Gradients/nn_grad.cs (+9, -0)
  3. src/TensorFlowNET.Core/Operations/NnOps/gen_nn_ops.cs (+12, -0)

src/TensorFlowNET.Core/APIs/tf.queue.cs (+0, -1)

@@ -52,7 +52,6 @@ namespace Tensorflow
             => new PaddingFIFOQueue(capacity,
                 new [] { dtype },
                 new[] { shape },
-                new[] { name },
                 shared_name: shared_name,
                 name: name);



src/TensorFlowNET.Core/Gradients/nn_grad.cs (+9, -0)

@@ -47,6 +47,15 @@ namespace Tensorflow.Gradients
             return new Tensor[] { gen_nn_ops.relu_grad(grads[0], op.outputs[0]) };
         }
 
+        [RegisterGradient("LeakyRelu")]
+        public static Tensor[] _LeakyReluGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+            var alpha = (float)op.get_attr("alpha");
+            return new Tensor[] { gen_nn_ops.leaky_relu_grad(grad, x, alpha: alpha) };
+        }
+
         /// <summary>
         /// The derivative of the softmax nonlinearity.
         /// </summary>
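
Note: the registered _LeakyReluGrad above does no math of its own; it forwards to the native LeakyReluGrad kernel via gen_nn_ops (next file). Elementwise, that kernel passes the incoming gradient through where the input was positive and scales it by alpha otherwise. A minimal C# sketch of that rule, for reference only (LeakyReluGradReference is a hypothetical helper, not part of this commit):

    // Reference semantics of LeakyReluGrad, elementwise (illustration only):
    // dx[i] = x[i] > 0 ? dy[i] : alpha * dy[i]
    static float[] LeakyReluGradReference(float[] dy, float[] x, float alpha = 0.2f)
    {
        var dx = new float[x.Length];
        for (int i = 0; i < x.Length; i++)
            dx[i] = x[i] > 0f ? dy[i] : alpha * dy[i];
        return dx;
    }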


src/TensorFlowNET.Core/Operations/NnOps/gen_nn_ops.cs (+12, -0)

@@ -284,6 +284,18 @@ namespace Tensorflow.Operations
             });
 
             return _op.outputs[0];
         }
+
+        public static Tensor leaky_relu_grad(Tensor gradients, Tensor features, float alpha = 0.2f, string name = null)
+        {
+            var _op = _op_def_lib._apply_op_helper("LeakyReluGrad", name: name, args: new
+            {
+                gradients,
+                features,
+                alpha
+            });
+
+            return _op.output;
+        }
 
         public static Tensor softmax(Tensor logits, string name = null)
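
With the gradient registered, TensorFlow.NET's autodiff can differentiate through LeakyRelu nodes instead of failing with an unregistered-gradient error. A minimal graph-mode usage sketch, assuming tf.nn.leaky_relu, tf.gradients, and tf.Session are exposed as in TensorFlow.NET of this era (the using directives here are an assumption, not taken from this commit):

    using Tensorflow;
    using static Tensorflow.Binding;  // assumed entry point exposing `tf`

    var x = tf.constant(new float[] { -2f, 0f, 3f });
    var y = tf.nn.leaky_relu(x, alpha: 0.2f);

    // Resolves the "LeakyRelu" gradient registered in nn_grad.cs and
    // emits a LeakyReluGrad node in the graph.
    var g = tf.gradients(y, x);

    using (var sess = tf.Session())
    {
        var dx = sess.run(g[0]);  // expected roughly: [0.2f, 0.2f, 1.0f]
    }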

