
Add Subtract layer.

tags/yolov3
Oceania2018, 4 years ago
commit 26a04bd083
10 changed files with 118 additions and 38 deletions
  1. src/TensorFlowNET.Keras/Engine/Interfaces/ITensorFlowOpLayer.cs  (+0, -12)
  2. src/TensorFlowNET.Keras/Layers/LayersApi.cs  (+5, -1)
  3. src/TensorFlowNET.Keras/Layers/Merging/Subtract.cs  (+23, -0)
  4. src/TensorFlowNET.Keras/Layers/TensorFlowOpLayer.cs  (+73, -0)
  5. src/TensorFlowNET.Keras/Losses/Huber.cs  (+3, -3)
  6. src/TensorFlowNET.Keras/Losses/LogCosh.cs  (+1, -3)
  7. src/TensorFlowNET.Keras/Losses/MeanAbsolutePercentageError.cs  (+1, -1)
  8. src/TensorFlowNET.Keras/Losses/MeanSquaredError.cs  (+1, -1)
  9. src/TensorFlowNET.Keras/Tensorflow.Keras.csproj  (+7, -1)
  10. src/TensorFlowNET.Keras/Utils/base_layer_utils.cs  (+4, -16)

+0 -12  src/TensorFlowNET.Keras/Engine/Interfaces/ITensorFlowOpLayer.cs

@@ -1,12 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-using Tensorflow.Keras.ArgsDefinition;
-
-namespace Tensorflow.Keras.Engine
-{
-    public interface ITensorFlowOpLayer
-    {
-        Layer GetOpLayer(TensorFlowOpLayerArgs args);
-    }
-}

+5 -1  src/TensorFlowNET.Keras/Layers/LayersApi.cs

@@ -142,6 +142,7 @@ namespace Tensorflow.Keras.Layers
         public Dense Dense(int units,
             Activation activation = null,
             IInitializer kernel_initializer = null,
+            bool use_bias = true,
             IInitializer bias_initializer = null,
             TensorShape input_shape = null)
             => new Dense(new DenseArgs
@@ -149,7 +150,7 @@ namespace Tensorflow.Keras.Layers
                 Units = units,
                 Activation = activation ?? keras.activations.Linear,
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
-                BiasInitializer = bias_initializer ?? tf.zeros_initializer,
+                BiasInitializer = bias_initializer ?? (use_bias ? tf.zeros_initializer : null),
                 InputShape = input_shape
             });
@@ -375,6 +376,9 @@ namespace Tensorflow.Keras.Layers
         public Add Add()
             => new Add(new MergeArgs { });
 
+        public Subtract Subtract()
+            => new Subtract(new MergeArgs { });
+
         public GlobalAveragePooling2D GlobalAveragePooling2D()
             => new GlobalAveragePooling2D(new Pooling2DArgs { });
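Side note (mine, not part of the commit): with the new flag, a bias-free Dense layer could be requested as below; a minimal sketch assuming the usual KerasApi entry point.

using static Tensorflow.KerasApi;

// Hypothetical usage sketch: with use_bias: false the BiasInitializer above
// resolves to null, so the layer is built without a bias vector.
var dense = keras.layers.Dense(
    units: 10,
    activation: keras.activations.Linear,
    use_bias: false);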



+23 -0  src/TensorFlowNET.Keras/Layers/Merging/Subtract.cs

@@ -0,0 +1,23 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    public class Subtract : Merge
+    {
+        public Subtract(MergeArgs args) : base(args)
+        {
+
+        }
+
+        protected override Tensors _merge_function(Tensors inputs)
+        {
+            if (len(inputs) != 2)
+                throw new ValueError($"A `Subtract` layer should be called on exactly 2 inputs");
+            return inputs[0] - inputs[1];
+        }
+    }
+}
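Rough usage sketch (mine, not from the diff), assuming the layer is driven through Apply with a two-element Tensors like other TF.NET Keras layers:

using static Tensorflow.Binding;
using static Tensorflow.KerasApi;

// Hypothetical sketch: element-wise subtraction of two same-shaped tensors.
var a = tf.constant(new float[] { 3f, 5f, 7f });
var b = tf.constant(new float[] { 1f, 2f, 3f });
var diff = keras.layers.Subtract().Apply(new Tensors(a, b));   // expected [2, 3, 4]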

+73 -0  src/TensorFlowNET.Keras/Layers/TensorFlowOpLayer.cs

@@ -0,0 +1,73 @@
+using NumSharp;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using Tensorflow;
+using Tensorflow.Graphs;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    public class TensorFlowOpLayer : Layer
+    {
+        TensorFlowOpLayerArgs args;
+        Dictionary<int, NDArray> constants => args.Constants;
+        NodeDef node_def => args.NodeDef;
+        static string TF_OP_LAYER_NAME_PREFIX = "tf_op_layer_";
+        public string OpType => node_def.Op;
+
+        public TensorFlowOpLayer(TensorFlowOpLayerArgs args)
+            : base(new LayerArgs
+            {
+                Name = TF_OP_LAYER_NAME_PREFIX + args.Name,
+                Trainable = args.Trainable,
+                DType = args.DType,
+                Autocast = false
+            })
+        {
+            this.args = args;
+            built = true;
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+        {
+            if (tf.Context.executing_eagerly())
+                return _defun_call(inputs);
+            return MakOp(inputs);
+        }
+
+        [AutoGraph]
+        Tensors _defun_call(Tensors inputs)
+            => MakOp(inputs);
+
+        Tensors MakOp(Tensors inputs)
+        {
+            var graph = inputs.graph;
+            graph.as_default();
+            foreach (var (index, constant) in enumerate(constants))
+            {
+                var value = constant_op.constant(constant, name: node_def.Input[index]);
+                inputs.Insert(index, value);
+            }
+
+            var (c_op, _) = ops._create_c_op(graph, node_def, inputs.ToArray(), new Operation[0]);
+            var op = graph._create_op_from_tf_operation(c_op);
+            op._control_flow_post_processing();
+
+            // Record the gradient because custom-made ops don't go through the
+            // code-gen'd eager call path
+            var op_type = op.node_def.Op;
+
+            tf.Runner.RecordGradient(op_type, op.inputs._inputs, null, op.outputs);
+
+            graph.Exit();
+            return op.outputs;
+        }
+
+        public Layer GetOpLayer(TensorFlowOpLayerArgs args)
+            => new TensorFlowOpLayer(args);
+    }
+}
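For context (my summary): TensorFlowOpLayer wraps a raw TensorFlow op so that bare tensor arithmetic can participate in a functional Keras model; CreateKerasHistoryHelper (see base_layer_utils.cs below) creates one per unwrapped op. A very rough sketch of the situation it targets, assuming keras.Input and keras.Model behave as in this build (shapes are made up):

using static Tensorflow.KerasApi;

// Hypothetical sketch: the subtraction is a raw Sub op rather than a layer call,
// so Keras-history creation wraps it in a TensorFlowOpLayer behind the scenes.
var x1 = keras.Input(shape: new[] { 8 });
var x2 = keras.Input(shape: new[] { 8 });
var y = x1 - x2;
var model = keras.Model(new Tensors(x1, x2), y);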

+3 -3  src/TensorFlowNET.Keras/Losses/Huber.cs

@@ -27,10 +27,10 @@ namespace Tensorflow.Keras.Losses
             Tensor error = math_ops.subtract(y_pred_cast, y_true_cast);
             Tensor abs_error = math_ops.abs(error);
             Tensor half = ops.convert_to_tensor(0.5, dtype: abs_error.dtype);
-            return gen_math_ops.mean(array_ops.where_v2(abs_error <= delta,
-                half * math_ops.pow(error, 2),
+            return gen_math_ops.mean(array_ops.where_v2(abs_error <= delta,
+                half * math_ops.pow(error, 2),
                 half * math_ops.pow(delta, 2) + delta * (abs_error - delta)),
-                axis : -1);
+                axis: -1);
         }
     }
 }
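For reference, the expression being re-wrapped here (whitespace-only changes) is the standard Huber loss with a mean over the last axis; restated in LaTeX (my note, not in the diff):

L_\delta(e) = \begin{cases} \tfrac{1}{2} e^2 & |e| \le \delta \\ \tfrac{1}{2}\delta^2 + \delta\,(|e| - \delta) & |e| > \delta \end{cases},
\qquad e = y_\mathrm{pred} - y_\mathrm{true}, \quad \mathrm{loss} = \mathrm{mean}\big(L_\delta(e),\ \mathrm{axis}=-1\big)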

+1 -3  src/TensorFlowNET.Keras/Losses/LogCosh.cs

@@ -19,10 +19,8 @@ namespace Tensorflow.Keras.Losses
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
             Tensor x = y_pred_dispatch - y_true_cast;
-            return gen_math_ops.mean(x + gen_math_ops.softplus(-2.0 * x) - math_ops.cast(math_ops.log(tf.Variable(2.0)), x.dtype),axis: -1);
-
+            return gen_math_ops.mean(x + gen_math_ops.softplus(-2.0 * x) - math_ops.cast(math_ops.log(tf.Variable(2.0)), x.dtype), axis: -1);
         }
     }
 }
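The single return kept here computes log-cosh through the numerically stable identity (my note, not in the diff):

\log\cosh(x) = x + \mathrm{softplus}(-2x) - \log 2, \qquad x = y_\mathrm{pred} - y_\mathrm{true}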

+1 -1  src/TensorFlowNET.Keras/Losses/MeanAbsolutePercentageError.cs

@@ -18,7 +18,7 @@ namespace Tensorflow.Keras.Losses
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
             Tensor diff = math_ops.abs(y_true_cast - y_pred_dispatch) / gen_math_ops.maximum(math_ops.abs(y_true_cast), gen_math_ops.cast(tf.constant(1e-7), y_pred_dispatch.dtype));
-            return gen_math_ops.cast(tf.constant(100), y_pred_dispatch.dtype) *gen_math_ops.mean(diff, axis: -1);
+            return gen_math_ops.cast(tf.constant(100), y_pred_dispatch.dtype) * gen_math_ops.mean(diff, axis: -1);
         }
     }
 }

+1 -1  src/TensorFlowNET.Keras/Losses/MeanSquaredError.cs

@@ -17,7 +17,7 @@ namespace Tensorflow.Keras.Losses
         {
             Tensor y_pred_dispatch = ops.convert_to_tensor(y_pred);
             Tensor y_true_cast = gen_math_ops.cast(y_true, y_pred_dispatch.dtype);
-            return gen_math_ops.mean(gen_math_ops.squared_difference(y_pred_dispatch, y_true_cast), axis: -1);
+            return gen_math_ops.mean(gen_math_ops.squared_difference(y_pred_dispatch, y_true_cast), axis: -1);
         }
     }
 }

+7 -1  src/TensorFlowNET.Keras/Tensorflow.Keras.csproj

@@ -21,7 +21,9 @@
 * Support BatchNormalization layer.
 * Building keras model in subclass, functional and sequential api
 * Implemented backward_function.
-* Support model.load_weights.</PackageReleaseNotes>
+* Support model.load_weights.
+* Add Subtract layer
+* Support YOLOv3 model.</PackageReleaseNotes>
     <Description>Keras for .NET
 
 Keras is an API designed for human beings, not machines. Keras follows best practices for reducing cognitive load: it offers consistent &amp; simple APIs, it minimizes the number of user actions required for common use cases, and it provides clear &amp; actionable error messages.</Description>
@@ -64,4 +66,8 @@ Keras is an API designed for human beings, not machines. Keras follows best prac
     </None>
   </ItemGroup>
 
+  <ItemGroup>
+    <Folder Include="Engine\Interfaces\" />
+  </ItemGroup>
+
 </Project>

+4 -16  src/TensorFlowNET.Keras/Utils/base_layer_utils.cs

@@ -21,6 +21,7 @@ using System.Linq;
 using System.Reflection;
 using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine;
+using Tensorflow.Keras.Layers;
 using static Tensorflow.Binding;
 using static Tensorflow.KerasApi;
 
@@ -150,12 +151,13 @@
 
                 // recursively
                 CreateKerasHistoryHelper(layer_inputs, processed_ops, created_layers);
-                var op_layer = GetLayer<ITensorFlowOpLayer>(new TensorFlowOpLayerArgs
+                var opLayerArgs = new TensorFlowOpLayerArgs
                 {
                     NodeDef = op.node_def,
                     Constants = constants,
                     Name = op.name
-                });
+                };
+                var op_layer = new TensorFlowOpLayer(opLayerArgs);
                 created_layers.Add(op_layer);
                 op_layer.SetConnectivityMetadata(layer_inputs, op.outputs);
                 processed_ops.Add(op);
@@ -163,20 +165,6 @@
             }
         }
 
-        static Layer GetLayer<T>(LayerArgs args)
-        {
-            Layer layer = default;
-            var assemble = Assembly.Load("TensorFlow.Keras.Layers");
-            foreach (var type in assemble.GetTypes().Where(x => x.GetInterface(typeof(T).Name) != null))
-            {
-                layer = (Layer)Activator.CreateInstance(type, new object[] { args });
-            }
-
-            if (layer == null)
-                throw new NotImplementedException($"Can't find implementation for type {args.GetType().Name}");
-            return layer;
-        }
-
         // recusive
         static bool uses_keras_history(Tensor op_input)
         {

