Browse Source

UnitTests

pull/1248/head
Sean Chen 1 year ago
parent
commit
d1f4c136e4
4 changed files with 42 additions and 6 deletions
  1. +1
    -0
      src/TensorFlowNET.Core/Keras/Regularizers/IRegularizer.cs
  2. +1
    -1
      src/TensorFlowNET.Core/Operations/Regularizers/L1.cs
  3. +32
    -4
      src/TensorFlowNET.Keras/Regularizers.cs
  4. +8
    -1
      test/TensorFlowNET.Keras.UnitTest/Model/ModelSaveTest.cs

+ 1
- 0
src/TensorFlowNET.Core/Keras/Regularizers/IRegularizer.cs View File

@@ -16,6 +16,7 @@ namespace Tensorflow.Keras


public interface IRegularizerApi public interface IRegularizerApi
{ {
IRegularizer GetRegularizerFromName(string name);
IRegularizer L1 { get; } IRegularizer L1 { get; }
IRegularizer L2 { get; } IRegularizer L2 { get; }
IRegularizer L1L2 { get; } IRegularizer L1L2 { get; }


+ 1
- 1
src/TensorFlowNET.Core/Operations/Regularizers/L1.cs View File

@@ -9,7 +9,7 @@ namespace Tensorflow.Operations.Regularizers
float _l1; float _l1;
private readonly Dictionary<string, object> _config; private readonly Dictionary<string, object> _config;


public string ClassName => "L2";
public string ClassName => "L1";
public virtual IDictionary<string, object> Config => _config; public virtual IDictionary<string, object> Config => _config;


public L1(float l1 = 0.01f) public L1(float l1 = 0.01f)


+ 32
- 4
src/TensorFlowNET.Keras/Regularizers.cs View File

@@ -1,23 +1,51 @@
namespace Tensorflow.Keras
using Tensorflow.Operations.Regularizers;

namespace Tensorflow.Keras
{ {
public class Regularizers: IRegularizerApi public class Regularizers: IRegularizerApi
{ {
private static Dictionary<string, IRegularizer> _nameActivationMap;

public IRegularizer l1(float l1 = 0.01f) public IRegularizer l1(float l1 = 0.01f)
=> new Tensorflow.Operations.Regularizers.L1(l1);
=> new L1(l1);
public IRegularizer l2(float l2 = 0.01f) public IRegularizer l2(float l2 = 0.01f)
=> new Tensorflow.Operations.Regularizers.L2(l2);
=> new L2(l2);


//From TF source //From TF source
//# The default value for l1 and l2 are different from the value in l1_l2 //# The default value for l1 and l2 are different from the value in l1_l2
//# for backward compatibility reason. Eg, L1L2(l2=0.1) will only have l2 //# for backward compatibility reason. Eg, L1L2(l2=0.1) will only have l2
//# and no l1 penalty. //# and no l1 penalty.
public IRegularizer l1l2(float l1 = 0.00f, float l2 = 0.00f) public IRegularizer l1l2(float l1 = 0.00f, float l2 = 0.00f)
=> new Tensorflow.Operations.Regularizers.L1L2(l1, l2);
=> new L1L2(l1, l2);

static Regularizers()
{
_nameActivationMap = new Dictionary<string, IRegularizer>();
_nameActivationMap["L1"] = new L1();
_nameActivationMap["L2"] = new L2();
_nameActivationMap["L1L2"] = new L1L2();
}


public IRegularizer L1 => l1(); public IRegularizer L1 => l1();


public IRegularizer L2 => l2(); public IRegularizer L2 => l2();


public IRegularizer L1L2 => l1l2(); public IRegularizer L1L2 => l1l2();

public IRegularizer GetRegularizerFromName(string name)
{
if (name == null)
{
throw new Exception($"Regularizer name cannot be null");
}
if (!_nameActivationMap.TryGetValue(name, out var res))
{
throw new Exception($"Regularizer {name} not found");
}
else
{
return res;
}
}
} }
} }

+ 8
- 1
test/TensorFlowNET.Keras.UnitTest/Model/ModelSaveTest.cs View File

@@ -2,6 +2,7 @@
using System.Collections.Generic; using System.Collections.Generic;
using System.Diagnostics; using System.Diagnostics;
using Tensorflow.Keras.Engine; using Tensorflow.Keras.Engine;
using Tensorflow.Keras.Layers;
using Tensorflow.Keras.Models; using Tensorflow.Keras.Models;
using Tensorflow.Keras.Optimizers; using Tensorflow.Keras.Optimizers;
using Tensorflow.Keras.Saving; using Tensorflow.Keras.Saving;
@@ -108,7 +109,13 @@ namespace Tensorflow.Keras.UnitTest.Model
tf.keras.layers.BatchNormalization(), tf.keras.layers.BatchNormalization(),
tf.keras.layers.MaxPooling2D((3, 3), strides:(2, 2)), tf.keras.layers.MaxPooling2D((3, 3), strides:(2, 2)),


tf.keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: "relu"),
tf.keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: keras.activations.Relu, bias_regularizer:keras.regularizers.L1L2),
tf.keras.layers.BatchNormalization(),

tf.keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: keras.activations.Relu, bias_regularizer:keras.regularizers.L2),
tf.keras.layers.BatchNormalization(),

tf.keras.layers.Conv2D(256, (5, 5), (1, 1), "same", activation: keras.activations.Relu, bias_regularizer:keras.regularizers.L1),
tf.keras.layers.BatchNormalization(), tf.keras.layers.BatchNormalization(),
tf.keras.layers.MaxPooling2D((3, 3), (2, 2)), tf.keras.layers.MaxPooling2D((3, 3), (2, 2)),




Loading…
Cancel
Save