diff --git a/src/TensorFlowNET.Core/Gradients/math_grad.cs b/src/TensorFlowNET.Core/Gradients/math_grad.cs
index b3c620c0..1f24afb0 100644
--- a/src/TensorFlowNET.Core/Gradients/math_grad.cs
+++ b/src/TensorFlowNET.Core/Gradients/math_grad.cs
@@ -319,7 +319,7 @@ namespace Tensorflow.Gradients
         [RegisterGradient("Maximum")]
         public static Tensor[] _MaximumGrad(Operation op, Tensor[] grads)
         {
-            return _MaximumMinimumGrad(op, grads[0]);
+            return _MaximumMinimumGrad(true, op, grads[0]);
         }
 
         ///
@@ -331,7 +331,7 @@ namespace Tensorflow.Gradients
         [RegisterGradient("Minimum")]
         public static Tensor[] _MinimumGrad(Operation op, Tensor[] grads)
         {
-            return _MaximumMinimumGrad(op, grads[0]);
+            return _MaximumMinimumGrad(false, op, grads[0]);
         }
 
         ///
@@ -340,7 +340,7 @@ namespace Tensorflow.Gradients
         ///
         ///
         ///
-        private static Tensor[] _MaximumMinimumGrad(Operation op, Tensor grad)
+        private static Tensor[] _MaximumMinimumGrad(bool isMaximum, Operation op, Tensor grad)
         {
             var x = op.inputs[0];
             var y = op.inputs[1];
@@ -349,7 +349,10 @@ namespace Tensorflow.Gradients
             var sy = array_ops.shape(y);
             var gradshape = array_ops.shape(grad);
             var zeros = array_ops.zeros(gradshape, gdtype);
-            var xmask = gen_math_ops.greater_equal(x, y);
+            var xmask =
+                isMaximum
+                ? gen_math_ops.greater_equal(x, y)
+                : gen_math_ops.less_equal(x, y);
             var (rx, ry) = gen_array_ops.broadcast_gradient_args(sx, sy);
             var xgrad = array_ops.where(xmask, grad, zeros);
             var ygrad = array_ops.where(xmask, zeros, grad);
@@ -512,6 +515,72 @@ namespace Tensorflow.Gradients
             });
         }
 
+        [RegisterGradient("Sqrt")]
+        public static Tensor[] _SqrtGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var y = op.outputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                y = math_ops.conj(y);
+                var factor = constant_op.constant(0.5f, dtype: y.dtype);
+                return new Tensor[] { grad * (factor * math_ops.reciprocal(y)) };
+            });
+        }
+
+        [RegisterGradient("Sin")]
+        public static Tensor[] _SinGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, gen_math_ops.cos(x)) };
+            });
+        }
+
+        [RegisterGradient("Sinh")]
+        public static Tensor[] _SinhGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, gen_math_ops.cosh(x)) };
+            });
+        }
+
+        [RegisterGradient("Cos")]
+        public static Tensor[] _CosGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, -gen_math_ops.sin(x)) };
+            });
+        }
+
+        [RegisterGradient("Cosh")]
+        public static Tensor[] _CoshGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, gen_math_ops.sinh(x)) };
+            });
+        }
+
         [RegisterGradient("Tanh")]
         public static Tensor[] _TanhGrad(Operation op, Tensor[] grads)
         {
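
Reviewer note: a brief sketch of the derivative identities the new registrations encode (standard calculus; the mapping to the helpers is my reading of the patch, not text from it). _SqrtGrad reuses the op output y = sqrt(x), whereas the sin/sinh/cos/cosh gradients work from the op input x:

    \frac{d}{dx}\sqrt{x} = \frac{1}{2\sqrt{x}} = \frac{1}{2}\cdot\frac{1}{y}, \quad y = \sqrt{x}
    \frac{d}{dx}\sin x = \cos x, \qquad \frac{d}{dx}\sinh x = \cosh x
    \frac{d}{dx}\cos x = -\sin x, \qquad \frac{d}{dx}\cosh x = \sinh x

For Maximum/Minimum, the new isMaximum flag only switches the mask between greater_equal and less_equal; the incoming gradient is then routed to x where the mask holds and to y elsewhere, as in the existing array_ops.where calls.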