@@ -216,6 +216,31 @@ namespace Tensorflow.Gradients
             return new Tensor[] { reshape2, reshape1 };
         }
 
+        public static Tensor[] _SigmoidGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var y = op.outputs[0];
+
+            return with(ops.control_dependencies(grads), delegate
+            {
+                y = math_ops.conj(y);
+                return new Tensor[] { gen_math_ops.sigmoid_grad(y, grad) };
+            });
+        }
+
+        public static Tensor[] _SquareGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                var y = constant_op.constant(2.0f, dtype: x.dtype);
+                return new Tensor[] { math_ops.multiply(grad, math_ops.multiply(x, y)) };
+            });
+        }
+
         public static Tensor[] _PowGrad(Operation op, Tensor[] grads)
         {
             var grad = grads[0];
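The two new gradient functions implement d/dx sigmoid(x) = y * (1 - y) with y = sigmoid(x), and d/dx x^2 = 2x, each multiplied by the incoming gradient. As a quick sanity check of those formulas (plain standalone C#, no TensorFlow.NET dependency; the input values are arbitrary), a central finite difference agrees with the analytic expressions:

    using System;

    class GradFormulaCheck
    {
        static double Sigmoid(double x) => 1.0 / (1.0 + Math.Exp(-x));

        static void Main()
        {
            double x = 0.7, dy = 1.3, eps = 1e-6;

            // Sigmoid gradient: dy * y * (1 - y), with y = sigmoid(x).
            double y = Sigmoid(x);
            double sigmoidAnalytic = dy * y * (1 - y);
            double sigmoidNumeric = dy * (Sigmoid(x + eps) - Sigmoid(x - eps)) / (2 * eps);

            // Square gradient: dy * 2 * x.
            double squareAnalytic = dy * 2 * x;
            double squareNumeric = dy * (Math.Pow(x + eps, 2) - Math.Pow(x - eps, 2)) / (2 * eps);

            Console.WriteLine($"sigmoid: analytic={sigmoidAnalytic:F6} numeric={sigmoidNumeric:F6}");
            Console.WriteLine($"square:  analytic={squareAnalytic:F6} numeric={squareNumeric:F6}");
        }
    }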
@@ -48,6 +48,10 @@ namespace Tensorflow
                     return array_grad._ReshapeGrad(oper, out_grads);
                 case "Relu":
                     return nn_grad._ReluGrad(oper, out_grads);
+                case "Sigmoid":
+                    return math_grad._SigmoidGrad(oper, out_grads);
+                case "Square":
+                    return math_grad._SquareGrad(oper, out_grads);
                 case "Squeeze":
                     return array_grad._SqueezeGrad(oper, out_grads);
                 case "Softmax":
@@ -299,6 +299,7 @@ namespace Tensorflow
        /// <returns> A `Tensor`. Has the same type as `input`.</returns>
        public static Tensor squeeze(Tensor input, int[] axis = null, string name = null)
        {
+            if (axis == null) axis = new int[0];
            var _op = _op_def_lib._apply_op_helper("Squeeze", name, args: new { input, squeeze_dims = axis });

            return _op.outputs[0];
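With the added null check, an omitted `axis` becomes an empty `squeeze_dims` list, which the TensorFlow `Squeeze` op interprets as "drop every dimension of size 1"; an explicit list drops only the listed size-1 dimensions. A shape-only sketch of those semantics (illustrative C#, not TensorFlow.NET code):

    using System;
    using System.Linq;

    class SqueezeShapes
    {
        // Shape-level model of Squeeze: drop size-1 dims, either all of them
        // (empty axis list) or only the listed ones.
        static int[] Squeeze(int[] shape, int[] axis)
        {
            if (axis == null) axis = new int[0]; // same default as the patch
            return shape
                .Where((dim, i) => !(dim == 1 && (axis.Length == 0 || axis.Contains(i))))
                .ToArray();
        }

        static void Main()
        {
            var shape = new[] { 4, 1, 2, 1 };
            Console.WriteLine(string.Join(",", Squeeze(shape, null)));        // 4,2
            Console.WriteLine(string.Join(",", Squeeze(shape, new[] { 1 }))); // 4,2,1
        }
    }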
@@ -128,6 +128,31 @@ namespace Tensorflow
            return op.output;
        }

+        /// <summary>
+        /// Computes the gradient of the sigmoid of <c>x</c> wrt its input.
+        /// </summary>
+        /// <param name="y">
+        /// </param>
+        /// <param name="dy">
+        /// </param>
+        /// <param name="name">
+        /// If specified, the created operation in the graph will be this one, otherwise it will be named 'SigmoidGrad'.
+        /// </param>
+        /// <returns>
+        /// The Operation can be fetched from the resulting Tensor, by fetching the Operation property from the result.
+        /// </returns>
+        /// <remarks>
+        /// Specifically, <c>grad = dy * y * (1 - y)</c>, where <c>y = sigmoid(x)</c>, and
+        /// <c>dy</c> is the corresponding input gradient.
+        /// </remarks>
+        public static Tensor sigmoid_grad(Tensor y, Tensor dy, string name = "SigmoidGrad")
+        {
+            var op = _op_def_lib._apply_op_helper("SigmoidGrad", name: name, args: new { y, dy });
+
+            return op.outputs[0];
+        }
+
        public static Tensor sinh(Tensor x, string name = null)
        {
            var _op = _op_def_lib._apply_op_helper("Sinh", name, args: new { x });
@@ -46,29 +46,26 @@ namespace TensorFlowNET.Examples
        public bool Run()
        {
-            var graph = tf.Graph();
-            var init=with(graph.as_default(), g =>
-            {
-                var features = tf.placeholder(tf.float32, new TensorShape(4, 2));
-                var labels = tf.placeholder(tf.int32, new TensorShape(4));
-                var (train_op, loss, gs) = make_graph(features, labels);
-                return tf.global_variables_initializer();
-            });
+            var graph = tf.Graph().as_default();
+            var features = tf.placeholder(tf.float32, new TensorShape(4, 2));
+            var labels = tf.placeholder(tf.int32, new TensorShape(4));
+            var (train_op, loss, gs) = make_graph(features, labels);
+            var init = tf.global_variables_initializer();

            // Start tf session
-            with<Session>(tf.Session(), sess =>
+            with(tf.Session(graph), sess =>
            {
                init.run();
                var step = 0;

                var xy = np.array(new bool[,]
                {
-                    {true, false, },
-                    {true, true, },
-                    {false, false, },
-                    {false, true, },
+                    {true, false},
+                    {true, true },
+                    {false, false },
+                    {false, true},
                }, dtype: np.float32);
                var y_ = np.array(new[] {true, false, false, true}, dtype: np.int32);