diff --git a/src/TensorFlowNET.Core/Graphs/Graph.cs b/src/TensorFlowNET.Core/Graphs/Graph.cs
index ce4b2fc6..a24c4648 100644
--- a/src/TensorFlowNET.Core/Graphs/Graph.cs
+++ b/src/TensorFlowNET.Core/Graphs/Graph.cs
@@ -69,7 +69,8 @@ namespace Tensorflow
         private List _unfeedable_tensors = new List();

         public string _name_stack = "";
-        public string _graph_key;
+        private string _graph_key;
+        public string graph_key => _graph_key;
         public string _last_loss_reduction;

         public Status Status { get; }
diff --git a/src/TensorFlowNET.Core/Keras/Engine/Sequential.cs b/src/TensorFlowNET.Core/Keras/Engine/Sequential.cs
index 587a956a..bbff5da3 100644
--- a/src/TensorFlowNET.Core/Keras/Engine/Sequential.cs
+++ b/src/TensorFlowNET.Core/Keras/Engine/Sequential.cs
@@ -19,6 +19,10 @@ namespace Tensorflow.Keras.Engine
         }

+        /// <summary>
+        /// Adds a layer instance on top of the layer stack.
+        /// </summary>
+        /// <param name="layer"></param>
         public void add(Layer layer)
         {
             built = false;
@@ -32,7 +36,7 @@ namespace Tensorflow.Keras.Engine
                 var x = keras.layers.Input(
                     batch_shape: batch_shape,
                     dtype: dtype,
-                    name: layer._name + "_input");
+                    name: layer.name + "_input");

                 // This will build the current layer
                 // and create the node connecting the current layer
diff --git a/src/TensorFlowNET.Core/Keras/Layers/Layer.cs b/src/TensorFlowNET.Core/Keras/Layers/Layer.cs
index fe3dd36f..4bd9e088 100644
--- a/src/TensorFlowNET.Core/Keras/Layers/Layer.cs
+++ b/src/TensorFlowNET.Core/Keras/Layers/Layer.cs
@@ -4,6 +4,7 @@ using System.Linq;
 using System.Text;
 using Tensorflow.Keras.Engine;
 using Tensorflow.Keras.Utils;
+using static Tensorflow.Python;

 namespace Tensorflow.Keras.Layers
 {
@@ -33,7 +34,8 @@ namespace Tensorflow.Keras.Layers
         protected InputSpec input_spec;
         protected bool supports_masking;
         protected List _trainable_weights;
-        public string _name;
+        private string _name;
+        public string name => _name;
         protected string _base_name;
         protected bool _compute_previous_mask;
         protected List _updates;
@@ -85,17 +87,24 @@ namespace Tensorflow.Keras.Layers
             // Handle Keras mask propagation from previous layer to current layer.
             Python.with(ops.name_scope(_name_scope()), delegate
             {
-                if (!built)
+                /*if (!built)
                 {
                     _maybe_build(inputs);
                     built = true;
-                }
+                }*/

                 if (build_graph)
                 {
                     // Symbolic execution on symbolic tensors. We will attempt to build
                     // the corresponding TF subgraph inside `backend.get_graph()`
-                    var graph = backend.get_graph();
+                    var graph = backend.get_graph().as_default();
+                    with(ops.name_scope(_name_scope()), delegate
+                    {
+                        // Build layer if applicable (if the `build` method has been
+                        // overridden).
+                        _maybe_build(inputs[0]);
+                    });
+
                     outputs = call(inputs[0], training: training);
                     _handle_activity_regularization(inputs[0], outputs);
                     _set_mask_metadata(inputs[0], outputs, null);
@@ -130,13 +139,17 @@ namespace Tensorflow.Keras.Layers

         protected virtual string _name_scope()
         {
-            return null;
+            return name;
         }

-        protected void _maybe_build(Tensor[] inputs)
+        protected void _maybe_build(Tensor input)
         {
-            var input_list = inputs;
-            build(input_list[0].GetShape());
+            // Check input assumptions set before layer building, e.g. input rank.
+            if (built)
+                return;
+
+            build(input.GetShape());
+            built = true;
         }

         protected virtual void build(TensorShape input_shape)
@@ -160,7 +173,7 @@ namespace Tensorflow.Keras.Layers
             var variable = _add_variable_with_custom_getter(name,
                 shape,
                 dtype: dtype,
-                getter: getter == null ? base_layer_utils.make_variable : getter,
+                //getter: getter == null ? base_layer_utils.make_variable : getter,
                 overwrite: true,
                 initializer: initializer,
                 trainable: trainable.Value);
@@ -176,12 +189,12 @@ namespace Tensorflow.Keras.Layers
             _updates.AddRange(updates_op);
         }

-        protected virtual void _init_set_name(string name)
+        protected virtual void _init_set_name(string name, bool zero_based = true)
         {
-            string base_name = name;
             if (name == null)
-                (_name, base_name) = _make_unique_name();
-            _base_name = base_name;
+                _name = base_layer_utils.unique_layer_name(generic_utils.to_snake_case(this.GetType().Name), zero_based: zero_based);
+            else
+                _name = name;
         }

         protected virtual (string, string) _make_unique_name()
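Example (not part of the patch): the intended effect of the reworked naming path above, assuming a layer such as Embedding reaches _init_set_name(null) with the default zero_based: true, that generic_utils.to_snake_case maps "Embedding" to "embedding", and the same using directives as the example projects in this repo.

    // Hypothetical usage sketch; the names in the comments are the expected outputs,
    // not values verified against this commit.
    var first = keras.layers.Embedding(10000, 16);    // no explicit name supplied
    var second = keras.layers.Embedding(10000, 16);   // same type, same default graph
    Console.WriteLine(first.name);    // "embedding"   - zero-based: first instance keeps the bare name
    Console.WriteLine(second.name);   // "embedding_1" - counter comes from the per-graph UID map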
diff --git a/src/TensorFlowNET.Core/Keras/Sequence.cs b/src/TensorFlowNET.Core/Keras/Sequence.cs
index a0385f17..fe6dfc33 100644
--- a/src/TensorFlowNET.Core/Keras/Sequence.cs
+++ b/src/TensorFlowNET.Core/Keras/Sequence.cs
@@ -30,26 +30,6 @@ namespace Tensorflow.Keras
             object value = null)
         {
             int[] length = new int[sequences.size];
-            switch (sequences.dtype.Name)
-            {
-                case "Object":
-                    for (int i = 0; i < sequences.size; i++)
-                    {
-                        switch (sequences.Data(i))
-                        {
-                            case string data:
-                                length[i] = Regex.Matches(data, ",").Count;
-                                break;
-                        }
-                    }
-                    break;
-                case "Int32":
-                    for (int i = 0; i < sequences.size; i++)
-                        length[i] = Regex.Matches(sequences.Data(i).ToString(), ",").Count;
-                    break;
-                default:
-                    throw new NotImplementedException($"pad_sequences: {sequences.dtype.Name}");
-            }

             if (maxlen == null)
                 maxlen = length.Max();
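Example (not part of the patch): with the length-sniffing switch removed, pad_sequences no longer infers lengths from comma-separated strings, so callers are expected to pass maxlen explicitly. A hedged sketch of the call shape used later in this diff:

    // Mirrors the (currently commented-out) call in BinaryTextClassification.Run;
    // train_data is assumed to be an NDArray of integer-encoded reviews.
    var padded = keras.preprocessing.sequence.pad_sequences(train_data,
        padding: "post",
        maxlen: 256);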
diff --git a/src/TensorFlowNET.Core/Keras/Utils/base_layer_utils.cs b/src/TensorFlowNET.Core/Keras/Utils/base_layer_utils.cs
index 682760f0..db927089 100644
--- a/src/TensorFlowNET.Core/Keras/Utils/base_layer_utils.cs
+++ b/src/TensorFlowNET.Core/Keras/Utils/base_layer_utils.cs
@@ -1,18 +1,39 @@
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using System.Text;
+using static Tensorflow.Python;

 namespace Tensorflow.Keras.Utils
 {
     public class base_layer_utils
     {
+        /// <summary>
+        /// Adds a new variable to the layer.
+        /// </summary>
+        /// <param name="name"></param>
+        /// <param name="shape"></param>
+        /// <param name="dtype"></param>
+        /// <param name="initializer"></param>
+        /// <param name="trainable"></param>
+        /// <param name="use_resource"></param>
         public static RefVariable make_variable(string name,
             int[] shape,
             TF_DataType dtype = TF_DataType.TF_FLOAT,
             IInitializer initializer = null,
-            bool trainable = false)
+            bool trainable = true,
+            bool use_resource = true)
         {
-            throw new NotImplementedException("");
+            var initializing_from_value = false;
+
+            ops.init_scope();
+
+            Func<Tensor> init_val = () => initializer.call(new TensorShape(shape), dtype: dtype);
+
+            var variable_dtype = dtype.as_base_dtype();
+            var v = tf.Variable(init_val);
+
+            return v;
         }

         ///
@@ -20,16 +41,56 @@ namespace Tensorflow.Keras.Utils
         ///
         ///
         ///
-        public static string unique_layer_name(string name)
+        public static string unique_layer_name(string name, Dictionary<(string, string), int> name_uid_map = null,
+            string[] avoid_names = null, string @namespace = "", bool zero_based = false)
         {
-            int number = get_default_graph_uid_map();
-            return $"{name}_{number}";
+            if (name_uid_map == null)
+                name_uid_map = get_default_graph_uid_map();
+            if (avoid_names == null)
+                avoid_names = new string[0];
+
+            string proposed_name = null;
+            while (proposed_name == null || avoid_names.Contains(proposed_name))
+            {
+                var name_key = (@namespace, name);
+                if (!name_uid_map.ContainsKey(name_key))
+                    name_uid_map[name_key] = 0;
+
+                if (zero_based)
+                {
+                    int number = name_uid_map[name_key];
+                    if (number > 0)
+                        proposed_name = $"{name}_{number}";
+                    else
+                        proposed_name = name;
+
+                    name_uid_map[name_key] += 1;
+                }
+                else
+                {
+                    name_uid_map[name_key] += 1;
+                    proposed_name = $"{name}_{name_uid_map[name_key]}";
+                }
+            }
+
+            return proposed_name;
         }

-        public static int get_default_graph_uid_map()
+        public static Dictionary<(string, string), int> get_default_graph_uid_map()
         {
             var graph = ops.get_default_graph();
-            return graph._next_id();
+            Dictionary<(string, string), int> name_uid_map = null;
+            if (backend.PER_GRAPH_LAYER_NAME_UIDS.ContainsKey(graph.graph_key))
+            {
+                name_uid_map = backend.PER_GRAPH_LAYER_NAME_UIDS[graph.graph_key];
+            }
+            else
+            {
+                name_uid_map = new Dictionary<(string, string), int>();
+                backend.PER_GRAPH_LAYER_NAME_UIDS[graph.graph_key] = name_uid_map;
+            }
+
+            return name_uid_map;
         }
     }
 }
diff --git a/src/TensorFlowNET.Core/Keras/backend.cs b/src/TensorFlowNET.Core/Keras/backend.cs
index 17ab0fbb..45d46ad4 100644
--- a/src/TensorFlowNET.Core/Keras/backend.cs
+++ b/src/TensorFlowNET.Core/Keras/backend.cs
@@ -6,6 +6,13 @@ namespace Tensorflow.Keras
 {
     public class backend
     {
+        /// <summary>
+        /// A global dictionary mapping graph objects to an index of counters used
+        /// for various layer names in each graph.
+        /// Allows unique autogenerated names to be given to layers, in a graph-specific way.
+        /// </summary>
+        public static Dictionary<string, Dictionary<(string, string), int>> PER_GRAPH_LAYER_NAME_UIDS = new Dictionary<string, Dictionary<(string, string), int>>();
+
         public static void track_variable(RefVariable v)
         {
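Example (not part of the patch): how the per-graph UID map is meant to be used, assuming the default graph and the namespaces Tensorflow, Tensorflow.Keras and Tensorflow.Keras.Utils.

    // Each distinct graph_key gets its own counter dictionary, so auto-names restart per graph.
    var n1 = base_layer_utils.unique_layer_name("dense");   // "dense_1" (default is one-based)
    var n2 = base_layer_utils.unique_layer_name("dense");   // "dense_2"
    // Layer._init_set_name passes zero_based: true instead, producing "dense", "dense_1", ...

    var graph = ops.get_default_graph();
    var uids = backend.PER_GRAPH_LAYER_NAME_UIDS[graph.graph_key];  // same dictionary the calls above updated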
diff --git a/src/TensorFlowNET.Core/Train/Checkpointable/CheckpointableBase.cs b/src/TensorFlowNET.Core/Train/Checkpointable/CheckpointableBase.cs
index 7a61ec5b..c5592ccf 100644
--- a/src/TensorFlowNET.Core/Train/Checkpointable/CheckpointableBase.cs
+++ b/src/TensorFlowNET.Core/Train/Checkpointable/CheckpointableBase.cs
@@ -1,34 +1,12 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
+using Tensorflow.Train;

 namespace Tensorflow
 {
-    public abstract class CheckpointableBase
+    public abstract class CheckpointableBase : Trackable
     {
-        /// <summary>
-        /// Restore-on-create for a variable be saved with this `Checkpointable`.
-        /// </summary>
-        /// <returns></returns>
-        protected virtual RefVariable _add_variable_with_custom_getter(string name,
-            int[] shape,
-            TF_DataType dtype = TF_DataType.TF_FLOAT,
-            IInitializer initializer = null,
-            Func<string, int[], TF_DataType, IInitializer, bool, RefVariable> getter = null,
-            bool overwrite = false,
-            bool trainable = false)
-        {
-            var new_variable = getter(name, shape, dtype, initializer, trainable);
-            if (!overwrite || new_variable is RefVariable)
-                return _track_checkpointable(new_variable, name: name,
-                    overwrite: overwrite);
-            else
-                return new_variable;
-        }
-
-        protected RefVariable _track_checkpointable(RefVariable checkpointable, string name, bool overwrite = false)
-        {
-            return checkpointable;
-        }
     }
 }
diff --git a/src/TensorFlowNET.Core/Train/Trackable.cs b/src/TensorFlowNET.Core/Train/Trackable.cs
new file mode 100644
index 00000000..e565b15b
--- /dev/null
+++ b/src/TensorFlowNET.Core/Train/Trackable.cs
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Train
+{
+    public abstract class Trackable
+    {
+        /// <summary>
+        /// Restore-on-create for a variable to be saved with this `Checkpointable`.
+        /// </summary>
+        /// <returns></returns>
+        protected virtual RefVariable _add_variable_with_custom_getter(string name,
+            int[] shape,
+            TF_DataType dtype = TF_DataType.TF_FLOAT,
+            IInitializer initializer = null,
+            Func<string, int[], TF_DataType, IInitializer, bool, RefVariable> getter = null,
+            bool overwrite = false,
+            bool trainable = false)
+        {
+            var new_variable = getter(name, shape, dtype, initializer, trainable);
+            if (!overwrite || new_variable is RefVariable)
+                return _track_checkpointable(new_variable, name: name,
+                    overwrite: overwrite);
+            else
+                return new_variable;
+        }
+
+        protected RefVariable _track_checkpointable(RefVariable checkpointable, string name, bool overwrite = false)
+        {
+            return checkpointable;
+        }
+    }
+}
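Example (not part of the patch): how a Trackable subclass can route variable creation through the overridable getter. The delegate shape (name, shape, dtype, initializer, trainable) => RefVariable is an assumption based on the call inside _add_variable_with_custom_getter, and TrackedThing is a hypothetical class.

    using Tensorflow;
    using Tensorflow.Keras.Utils;
    using Tensorflow.Train;

    class TrackedThing : Trackable
    {
        // Wraps make_variable in a lambda instead of passing the method group directly,
        // since make_variable now takes an extra use_resource parameter.
        public RefVariable AddWeight(string name, int[] shape, IInitializer initializer)
            => _add_variable_with_custom_getter(name, shape,
                getter: (n, s, dt, init, trainable) => base_layer_utils.make_variable(n, s, dt, init, trainable),
                overwrite: true,
                initializer: initializer,
                trainable: true);
    }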
diff --git a/src/TensorFlowNET.Core/Variables/RefVariable.cs b/src/TensorFlowNET.Core/Variables/RefVariable.cs
index 9ad86b09..3d586f4b 100644
--- a/src/TensorFlowNET.Core/Variables/RefVariable.cs
+++ b/src/TensorFlowNET.Core/Variables/RefVariable.cs
@@ -111,7 +111,7 @@ namespace Tensorflow
             // Store the graph key so optimizers know how to only retrieve variables from
             // this graph.
-            _graph_key = ops.get_default_graph()._graph_key;
+            _graph_key = ops.get_default_graph().graph_key;
             _trainable = trainable;

             if (trainable && !collections.Contains(ops.GraphKeys.TRAINABLE_VARIABLES))
diff --git a/test/TensorFlowNET.Examples/BasicModels/NaiveBayesClassifier.cs b/test/TensorFlowNET.Examples/BasicModels/NaiveBayesClassifier.cs
index 36935ccb..a9104b96 100644
--- a/test/TensorFlowNET.Examples/BasicModels/NaiveBayesClassifier.cs
+++ b/test/TensorFlowNET.Examples/BasicModels/NaiveBayesClassifier.cs
@@ -18,69 +18,14 @@ namespace TensorFlowNET.Examples
         public string Name => "Naive Bayes Classifier";

         public bool ImportGraph { get; set; } = false;
-
+        public NDArray X, y;
         public Normal dist { get; set; }

         public bool Run()
         {
-            var X = np.array(new float[][] { new float[] { 5.1f, 3.5f},new float[] { 4.9f, 3.0f },new float[] { 4.7f, 3.2f },
-                new float[] { 4.6f, 3.1f },new float[] { 5.0f, 3.6f },new float[] { 5.4f, 3.9f },
-                new float[] { 4.6f, 3.4f },new float[] { 5.0f, 3.4f },new float[] { 4.4f, 2.9f },
-                new float[] { 4.9f, 3.1f },new float[] { 5.4f, 3.7f },new float[] {4.8f, 3.4f },
-                new float[] {4.8f, 3.0f },new float[] {4.3f, 3.0f },new float[] {5.8f, 4.0f },
-                new float[] {5.7f, 4.4f },new float[] {5.4f, 3.9f },new float[] {5.1f, 3.5f },
-                new float[] {5.7f, 3.8f },new float[] {5.1f, 3.8f },new float[] {5.4f, 3.4f },
-                new float[] {5.1f, 3.7f },new float[] {5.1f, 3.3f },new float[] {4.8f, 3.4f },
-                new float[] {5.0f, 3.0f },new float[] {5.0f , 3.4f },new float[] {5.2f, 3.5f },
-                new float[] {5.2f, 3.4f },new float[] {4.7f, 3.2f },new float[] {4.8f, 3.1f },
-                new float[] {5.4f, 3.4f },new float[] {5.2f, 4.1f},new float[] {5.5f, 4.2f },
-                new float[] {4.9f, 3.1f },new float[] {5.0f , 3.2f },new float[] {5.5f, 3.5f },
-                new float[] {4.9f, 3.6f },new float[] {4.4f, 3.0f },new float[] {5.1f, 3.4f },
-                new float[] {5.0f , 3.5f },new float[] {4.5f, 2.3f },new float[] {4.4f, 3.2f },
-                new float[] {5.0f , 3.5f },new float[] {5.1f, 3.8f },new float[] {4.8f, 3.0f},
-                new float[] {5.1f, 3.8f },new float[] {4.6f, 3.2f },new float[] { 5.3f, 3.7f },
-                new float[] {5.0f , 3.3f },new float[] {7.0f , 3.2f },new float[] {6.4f, 3.2f },
-                new float[] {6.9f, 3.1f },new float[] {5.5f, 2.3f },new float[] {6.5f, 2.8f },
-                new float[] {5.7f, 2.8f },new float[] {6.3f, 3.3f },new float[] {4.9f, 2.4f },
-                new float[] {6.6f, 2.9f },new float[] {5.2f, 2.7f },new float[] {5.0f , 2.0f },
-                new float[] {5.9f, 3.0f },new float[] {6.0f , 2.2f },new float[] {6.1f, 2.9f },
-                new float[] {5.6f, 2.9f },new float[] {6.7f, 3.1f },new float[] {5.6f, 3.0f },
-                new float[] {5.8f, 2.7f },new float[] {6.2f, 2.2f },new float[] {5.6f, 2.5f },
-                new float[] {5.9f, 3.0f},new float[] {6.1f, 2.8f},new float[] {6.3f, 2.5f},
-                new float[] {6.1f, 2.8f},new float[] {6.4f, 2.9f},new float[] {6.6f, 3.0f },
-                new float[] {6.8f, 2.8f},new float[] {6.7f, 3.0f },new float[] {6.0f , 2.9f},
-                new float[] {5.7f, 2.6f},new float[] {5.5f, 2.4f},new float[] {5.5f, 2.4f},
-                new float[] {5.8f, 2.7f},new float[] {6.0f , 2.7f},new float[] {5.4f, 3.0f},
-                new float[] {6.0f , 3.4f},new float[] {6.7f, 3.1f},new float[] {6.3f, 2.3f},
-                new float[] {5.6f, 3.0f },new float[] {5.5f, 2.5f},new float[] {5.5f, 2.6f},
-                new float[] {6.1f, 3.0f },new float[] {5.8f, 2.6f},new float[] {5.0f, 2.3f},
-                new float[] {5.6f, 2.7f},new float[] {5.7f, 3.0f },new float[] {5.7f, 2.9f},
-                new float[] {6.2f, 2.9f},new float[] {5.1f, 2.5f},new float[] {5.7f, 2.8f},
-                new float[] {6.3f, 3.3f},new float[] {5.8f, 2.7f},new float[] {7.1f, 3.0f },
-                new float[] {6.3f, 2.9f},new float[] {6.5f, 3.0f },new float[] {7.6f, 3.0f },
-                new float[] {4.9f, 2.5f},new float[] {7.3f, 2.9f},new float[] {6.7f, 2.5f},
-                new float[] {7.2f, 3.6f},new float[] {6.5f, 3.2f},new float[] {6.4f, 2.7f},
-                new float[] {6.8f, 3.00f },new float[] {5.7f, 2.5f},new float[] {5.8f, 2.8f},
-                new float[] {6.4f, 3.2f},new float[] {6.5f, 3.0f },new float[] {7.7f, 3.8f},
-                new float[] {7.7f, 2.6f},new float[] {6.0f , 2.2f},new float[] {6.9f, 3.2f},
-                new float[] {5.6f, 2.8f},new float[] {7.7f, 2.8f},new float[] {6.3f, 2.7f},
-                new float[] {6.7f, 3.3f},new float[] {7.2f, 3.2f},new float[] {6.2f, 2.8f},
-                new float[] {6.1f, 3.0f },new float[] {6.4f, 2.8f},new float[] {7.2f, 3.0f },
-                new float[] {7.4f, 2.8f},new float[] {7.9f, 3.8f},new float[] {6.4f, 2.8f},
-                new float[] {6.3f, 2.8f},new float[] {6.1f, 2.6f},new float[] {7.7f, 3.0f },
-                new float[] {6.3f, 3.4f},new float[] {6.4f, 3.1f},new float[] {6.0f, 3.0f},
-                new float[] {6.9f, 3.1f},new float[] {6.7f, 3.1f},new float[] {6.9f, 3.1f},
-                new float[] {5.8f, 2.7f},new float[] {6.8f, 3.2f},new float[] {6.7f, 3.3f},
-                new float[] {6.7f, 3.0f },new float[] {6.3f, 2.5f},new float[] {6.5f, 3.0f },
-                new float[] {6.2f, 3.4f},new float[] {5.9f, 3.0f }, new float[] {5.8f, 3.0f }});
-
-            var y = np.array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-                0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2);
+            PrepareData();
+            fit(X, y);
+
             // Create a regular grid and classify each point
             float x_min = X.amin(0).Data(0) - 0.5f;
             float y_min = X.amin(0).Data(1) - 0.5f;
@@ -88,12 +33,11 @@
             float y_max = X.amax(0).Data(1) + 0.5f;
             var (xx, yy) = np.meshgrid(np.linspace(x_min, x_max, 30),
                 np.linspace(y_min, y_max, 30));
-            var s = tf.Session();
-            if (xx.dtype == typeof(float))
+            with(tf.Session(), sess =>
             {
-                var samples = np.hstack(xx.ravel().reshape(xx.size,1), yy.ravel().reshape(yy.size,1));
-                var Z = s.run(predict(samples));
-            }
+                var samples = np.hstack(xx.ravel().reshape(xx.size, 1), yy.ravel().reshape(yy.size, 1));
+                var Z = sess.run(predict(samples));
+            });

             return true;
         }
@@ -189,7 +133,42 @@ namespace TensorFlowNET.Examples
         public void PrepareData()
         {
-
+            #region Training data
+            X = np.array(new float[,] {
+                {5.1f, 3.5f}, {4.9f, 3.0f}, {4.7f, 3.2f}, {4.6f, 3.1f}, {5.0f, 3.6f}, {5.4f, 3.9f},
+                {4.6f, 3.4f}, {5.0f, 3.4f}, {4.4f, 2.9f}, {4.9f, 3.1f}, {5.4f, 3.7f}, {4.8f, 3.4f},
+                {4.8f, 3.0f}, {4.3f, 3.0f}, {5.8f, 4.0f}, {5.7f, 4.4f}, {5.4f, 3.9f}, {5.1f, 3.5f},
+                {5.7f, 3.8f}, {5.1f, 3.8f}, {5.4f, 3.4f}, {5.1f, 3.7f}, {5.1f, 3.3f}, {4.8f, 3.4f},
+                {5.0f, 3.0f}, {5.0f, 3.4f}, {5.2f, 3.5f}, {5.2f, 3.4f}, {4.7f, 3.2f}, {4.8f, 3.1f},
+                {5.4f, 3.4f}, {5.2f, 4.1f}, {5.5f, 4.2f}, {4.9f, 3.1f}, {5.0f, 3.2f}, {5.5f, 3.5f},
+                {4.9f, 3.6f}, {4.4f, 3.0f}, {5.1f, 3.4f}, {5.0f, 3.5f}, {4.5f, 2.3f}, {4.4f, 3.2f},
+                {5.0f, 3.5f}, {5.1f, 3.8f}, {4.8f, 3.0f}, {5.1f, 3.8f}, {4.6f, 3.2f}, {5.3f, 3.7f},
+                {5.0f, 3.3f}, {7.0f, 3.2f}, {6.4f, 3.2f}, {6.9f, 3.1f}, {5.5f, 2.3f}, {6.5f, 2.8f},
+                {5.7f, 2.8f}, {6.3f, 3.3f}, {4.9f, 2.4f}, {6.6f, 2.9f}, {5.2f, 2.7f}, {5.0f, 2.0f},
+                {5.9f, 3.0f}, {6.0f, 2.2f}, {6.1f, 2.9f}, {5.6f, 2.9f}, {6.7f, 3.1f}, {5.6f, 3.0f},
+                {5.8f, 2.7f}, {6.2f, 2.2f}, {5.6f, 2.5f}, {5.9f, 3.0f}, {6.1f, 2.8f}, {6.3f, 2.5f},
+                {6.1f, 2.8f}, {6.4f, 2.9f}, {6.6f, 3.0f}, {6.8f, 2.8f}, {6.7f, 3.0f}, {6.0f, 2.9f},
+                {5.7f, 2.6f}, {5.5f, 2.4f}, {5.5f, 2.4f}, {5.8f, 2.7f}, {6.0f, 2.7f}, {5.4f, 3.0f},
+                {6.0f, 3.4f}, {6.7f, 3.1f}, {6.3f, 2.3f}, {5.6f, 3.0f}, {5.5f, 2.5f}, {5.5f, 2.6f},
+                {6.1f, 3.0f}, {5.8f, 2.6f}, {5.0f, 2.3f}, {5.6f, 2.7f}, {5.7f, 3.0f}, {5.7f, 2.9f},
+                {6.2f, 2.9f}, {5.1f, 2.5f}, {5.7f, 2.8f}, {6.3f, 3.3f}, {5.8f, 2.7f}, {7.1f, 3.0f},
+                {6.3f, 2.9f}, {6.5f, 3.0f}, {7.6f, 3.0f}, {4.9f, 2.5f}, {7.3f, 2.9f}, {6.7f, 2.5f},
+                {7.2f, 3.6f}, {6.5f, 3.2f}, {6.4f, 2.7f}, {6.8f, 3.0f}, {5.7f, 2.5f}, {5.8f, 2.8f},
+                {6.4f, 3.2f}, {6.5f, 3.0f}, {7.7f, 3.8f}, {7.7f, 2.6f}, {6.0f, 2.2f}, {6.9f, 3.2f},
+                {5.6f, 2.8f}, {7.7f, 2.8f}, {6.3f, 2.7f}, {6.7f, 3.3f}, {7.2f, 3.2f}, {6.2f, 2.8f},
+                {6.1f, 3.0f}, {6.4f, 2.8f}, {7.2f, 3.0f}, {7.4f, 2.8f}, {7.9f, 3.8f}, {6.4f, 2.8f},
+                {6.3f, 2.8f}, {6.1f, 2.6f}, {7.7f, 3.0f}, {6.3f, 3.4f}, {6.4f, 3.1f}, {6.0f, 3.0f},
+                {6.9f, 3.1f}, {6.7f, 3.1f}, {6.9f, 3.1f}, {5.8f, 2.7f}, {6.8f, 3.2f}, {6.7f, 3.3f},
+                {6.7f, 3.0f}, {6.3f, 2.5f}, {6.5f, 3.0f}, {6.2f, 3.4f}, {5.9f, 3.0f}, {5.8f, 3.0f}});
+
+            y = np.array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2);
+            #endregion
         }
     }
 }
diff --git a/test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs b/test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs
index 7f7cc13d..63c3aefc 100644
--- a/test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs
+++ b/test/TensorFlowNET.Examples/TextProcess/BinaryTextClassification.cs
@@ -30,12 +30,12 @@ namespace TensorFlowNET.Examples
         {
             PrepareData();

-            Console.WriteLine($"Training entries: {train_data.size}, labels: {train_labels.size}");
+            Console.WriteLine($"Training entries: {train_data.len}, labels: {train_labels.len}");

             // A dictionary mapping words to an integer index
             var word_index = GetWordIndex();

-            train_data = keras.preprocessing.sequence.pad_sequences(train_data,
+            /*train_data = keras.preprocessing.sequence.pad_sequences(train_data,
                 value: word_index["<PAD>"],
                 padding: "post",
                 maxlen: 256);
@@ -43,13 +43,14 @@
             test_data = keras.preprocessing.sequence.pad_sequences(test_data,
                 value: word_index["<PAD>"],
                 padding: "post",
-                maxlen: 256);
+                maxlen: 256);*/

             // input shape is the vocabulary count used for the movie reviews (10,000 words)
             int vocab_size = 10000;

             var model = keras.Sequential();
-            model.add(keras.layers.Embedding(vocab_size, 16));
+            var layer = keras.layers.Embedding(vocab_size, 16);
+            model.add(layer);

             return false;
         }
@@ -78,17 +79,23 @@ namespace TensorFlowNET.Examples
             labels_test = labels_test[indices_test];

             // not completed
-            var xs = x_train.hstack(x_test);
+            var xs = x_train.hstack(x_test);
             var labels = labels_train.hstack(labels_test);

             var idx = x_train.size;
             var y_train = labels_train;
             var y_test = labels_test;

-            x_train = train_data;
-            train_labels = y_train;
+            // convert x_train
+            train_data = new NDArray(np.int32, (x_train.size, 256));
+            for (int i = 0; i < x_train.size; i++)
+                train_data[i] = x_train[i].Data(0).Split(',').Select(x => int.Parse(x)).ToArray();

-            test_data = x_test;
+            test_data = new NDArray(np.int32, (x_test.size, 256));
+            for (int i = 0; i < x_test.size; i++)
+                test_data[i] = x_test[i].Data(0).Split(',').Select(x => int.Parse(x)).ToArray();
+
+            train_labels = y_train;
             test_labels = y_test;
         }
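Example (not part of the patch): the end-to-end flow these changes are building toward, taken almost verbatim from BinaryTextClassification.Run; the printed name is indicative only.

    var model = keras.Sequential();
    var layer = keras.layers.Embedding(vocab_size, 16);   // vocab_size = 10000 in the example
    model.add(layer);                                      // Sequential.add wires and builds the layer lazily
    Console.WriteLine(layer.name);                         // e.g. "embedding", assigned via unique_layer_name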