@@ -69,7 +69,8 @@ namespace Tensorflow
         private List<Tensor> _unfeedable_tensors = new List<Tensor>();

         public string _name_stack = "";
-        public string _graph_key;
+        private string _graph_key;
+        public string graph_key => _graph_key;
         public string _last_loss_reduction;

         public Status Status { get; }
@@ -19,6 +19,10 @@ namespace Tensorflow.Keras.Engine
         }

+        /// <summary>
+        /// Adds a layer instance on top of the layer stack.
+        /// </summary>
+        /// <param name="layer"></param>
         public void add(Layer layer)
         {
             built = false;
@@ -32,7 +36,7 @@ namespace Tensorflow.Keras.Engine
             var x = keras.layers.Input(
                 batch_shape: batch_shape,
                 dtype: dtype,
-                name: layer._name + "_input");
+                name: layer.name + "_input");

             // This will build the current layer
             // and create the node connecting the current layer
@@ -4,6 +4,7 @@ using System.Linq;
 using System.Text;
 using Tensorflow.Keras.Engine;
 using Tensorflow.Keras.Utils;
+using static Tensorflow.Python;

 namespace Tensorflow.Keras.Layers
 {
@@ -33,7 +34,8 @@ namespace Tensorflow.Keras.Layers
         protected InputSpec input_spec;
         protected bool supports_masking;
         protected List<RefVariable> _trainable_weights;
-        public string _name;
+        private string _name;
+        public string name => _name;
         protected string _base_name;
         protected bool _compute_previous_mask;
         protected List<Operation> _updates;
@@ -85,17 +87,24 @@ namespace Tensorflow.Keras.Layers
             // Handle Keras mask propagation from previous layer to current layer.
             Python.with(ops.name_scope(_name_scope()), delegate
             {
-                if (!built)
+                /*if (!built)
                 {
                     _maybe_build(inputs);
                     built = true;
-                }
+                }*/

                 if (build_graph)
                 {
                     // Symbolic execution on symbolic tensors. We will attempt to build
                     // the corresponding TF subgraph inside `backend.get_graph()`
-                    var graph = backend.get_graph();
+                    var graph = backend.get_graph().as_default();
+                    with(ops.name_scope(_name_scope()), delegate
+                    {
+                        // Build layer if applicable (if the `build` method has been
+                        // overridden).
+                        _maybe_build(inputs[0]);
+                    });
                     outputs = call(inputs[0], training: training);
                     _handle_activity_regularization(inputs[0], outputs);
                     _set_mask_metadata(inputs[0], outputs, null);
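The hunk above moves `_maybe_build` inside the graph's name scope so a layer is built exactly once, on first call, before `call` produces outputs. A minimal, self-contained sketch of that build-once pattern (the `LazyLayer` type and the simplified signatures below are illustrative, not the library's actual API):

```csharp
// Illustrative sketch only: a deferred-build base class in the spirit of the
// change above. Real layers take Tensors and TensorShapes, not float arrays.
public abstract class LazyLayer
{
    protected bool built = false;

    // Creates weights once the input shape is known (overridden per layer).
    protected abstract void build(int[] inputShape);

    // The per-invocation computation (overridden per layer).
    protected abstract float[] call(float[] input);

    public float[] Apply(float[] input, int[] inputShape)
    {
        if (!built)
        {
            build(inputShape); // shape-dependent weight creation happens here
            built = true;      // later calls skip straight to compute
        }
        return call(input);
    }
}
```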
@@ -130,13 +139,17 @@ namespace Tensorflow.Keras.Layers
         protected virtual string _name_scope()
         {
-            return null;
+            return name;
         }

-        protected void _maybe_build(Tensor[] inputs)
+        protected void _maybe_build(Tensor input)
         {
-            var input_list = inputs;
-            build(input_list[0].GetShape());
+            // Check input assumptions set before layer building, e.g. input rank.
+            if (built)
+                return;
+            build(input.GetShape());
+            built = true;
         }

         protected virtual void build(TensorShape input_shape)
@@ -160,7 +173,7 @@ namespace Tensorflow.Keras.Layers
             var variable = _add_variable_with_custom_getter(name,
                 shape,
                 dtype: dtype,
-                getter: getter == null ? base_layer_utils.make_variable : getter,
+                //getter: getter == null ? base_layer_utils.make_variable : getter,
                 overwrite: true,
                 initializer: initializer,
                 trainable: trainable.Value);
@@ -176,12 +189,12 @@ namespace Tensorflow.Keras.Layers
             _updates.AddRange(updates_op);
         }

-        protected virtual void _init_set_name(string name)
+        protected virtual void _init_set_name(string name, bool zero_based = true)
         {
-            string base_name = name;
             if (name == null)
-                (_name, base_name) = _make_unique_name();
-            _base_name = base_name;
+                _name = base_layer_utils.unique_layer_name(generic_utils.to_snake_case(this.GetType().Name), zero_based: zero_based);
+            else
+                _name = name;
         }

         protected virtual (string, string) _make_unique_name()
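With this change, a layer constructed without an explicit name derives one from its type. The expected flow, assuming `to_snake_case` lower-cases the class name (illustrative values, not verified output):

```csharp
// Assumed behavior of the new naming path:
// _init_set_name(null) on an Embedding instance:
//   generic_utils.to_snake_case("Embedding")      -> "embedding"
//   base_layer_utils.unique_layer_name("embedding",
//       zero_based: true)  -> "embedding" for the first instance,
//                             "embedding_1" for the second, and so on.
// _init_set_name("my_embedding") -> _name = "my_embedding" (used verbatim).
```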
@@ -30,26 +30,6 @@ namespace Tensorflow.Keras
             object value = null)
         {
             int[] length = new int[sequences.size];
-            switch (sequences.dtype.Name)
-            {
-                case "Object":
-                    for (int i = 0; i < sequences.size; i++)
-                    {
-                        switch (sequences.Data<object>(i))
-                        {
-                            case string data:
-                                length[i] = Regex.Matches(data, ",").Count;
-                                break;
-                        }
-                    }
-                    break;
-                case "Int32":
-                    for (int i = 0; i < sequences.size; i++)
-                        length[i] = Regex.Matches(sequences.Data<object>(i).ToString(), ",").Count;
-                    break;
-                default:
-                    throw new NotImplementedException($"pad_sequences: {sequences.dtype.Name}");
-            }

             if (maxlen == null)
                 maxlen = length.Max();
@@ -1,18 +1,39 @@
 using System;
 using System.Collections.Generic;
+using System.Linq;
 using System.Text;
+using static Tensorflow.Python;

 namespace Tensorflow.Keras.Utils
 {
     public class base_layer_utils
     {
+        /// <summary>
+        /// Adds a new variable to the layer.
+        /// </summary>
+        /// <param name="name"></param>
+        /// <param name="shape"></param>
+        /// <param name="dtype"></param>
+        /// <param name="initializer"></param>
+        /// <param name="trainable"></param>
+        /// <returns></returns>
         public static RefVariable make_variable(string name,
             int[] shape,
             TF_DataType dtype = TF_DataType.TF_FLOAT,
             IInitializer initializer = null,
-            bool trainable = false)
+            bool trainable = true,
+            bool use_resource = true)
         {
-            throw new NotImplementedException("");
+            var initializing_from_value = false;
+            ops.init_scope();
+            Func<Tensor> init_val = () => initializer.call(new TensorShape(shape), dtype: dtype);
+            var variable_dtype = dtype.as_base_dtype();
+            var v = tf.Variable(init_val);
+            return v;
         }

         /// <summary>
@@ -20,16 +41,56 @@ namespace Tensorflow.Keras.Utils
         /// </summary>
         /// <param name="name"></param>
         /// <returns></returns>
-        public static string unique_layer_name(string name)
+        public static string unique_layer_name(string name, Dictionary<(string, string), int> name_uid_map = null,
+            string[] avoid_names = null, string @namespace = "", bool zero_based = false)
         {
-            int number = get_default_graph_uid_map();
-            return $"{name}_{number}";
+            if (name_uid_map == null)
+                name_uid_map = get_default_graph_uid_map();
+            if (avoid_names == null)
+                avoid_names = new string[0];
+
+            string proposed_name = null;
+            while (proposed_name == null || avoid_names.Contains(proposed_name))
+            {
+                var name_key = (@namespace, name);
+                if (!name_uid_map.ContainsKey(name_key))
+                    name_uid_map[name_key] = 0;
+
+                if (zero_based)
+                {
+                    int number = name_uid_map[name_key];
+                    if (number > 0)
+                        proposed_name = $"{name}_{number}";
+                    else
+                        proposed_name = name;
+                    name_uid_map[name_key] += 1;
+                }
+                else
+                {
+                    name_uid_map[name_key] += 1;
+                    proposed_name = $"{name}_{name_uid_map[name_key]}";
+                }
+            }
+
+            return proposed_name;
        }

-        public static int get_default_graph_uid_map()
+        public static Dictionary<(string, string), int> get_default_graph_uid_map()
         {
             var graph = ops.get_default_graph();
-            return graph._next_id();
+            Dictionary<(string, string), int> name_uid_map = null;
+
+            if (backend.PER_GRAPH_LAYER_NAME_UIDS.ContainsKey(graph.graph_key))
+            {
+                name_uid_map = backend.PER_GRAPH_LAYER_NAME_UIDS[graph.graph_key];
+            }
+            else
+            {
+                name_uid_map = new Dictionary<(string, string), int>();
+                backend.PER_GRAPH_LAYER_NAME_UIDS[graph.graph_key] = name_uid_map;
+            }
+
+            return name_uid_map;
         }
     }
 }
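The counter semantics of `unique_layer_name` are easiest to see with concrete calls. Below is a standalone sketch of the same algorithm, decoupled from the graph-keyed map (`Next` is a hypothetical helper, not part of the library):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

static class UniqueNameDemo
{
    // Same counter logic as unique_layer_name, with an explicit map.
    static string Next(Dictionary<(string, string), int> uids, string name,
        bool zeroBased, string ns = "", string[] avoid = null)
    {
        avoid = avoid ?? new string[0];
        string proposed = null;
        while (proposed == null || avoid.Contains(proposed))
        {
            var key = (ns, name);
            if (!uids.ContainsKey(key)) uids[key] = 0;
            if (zeroBased)
            {
                int n = uids[key];
                proposed = n > 0 ? $"{name}_{n}" : name; // first instance keeps the bare name
                uids[key] += 1;
            }
            else
            {
                uids[key] += 1;                          // first instance becomes name_1
                proposed = $"{name}_{uids[key]}";
            }
        }
        return proposed;
    }

    static void Main()
    {
        var uids = new Dictionary<(string, string), int>();
        Console.WriteLine(Next(uids, "dense", zeroBased: true));  // dense
        Console.WriteLine(Next(uids, "dense", zeroBased: true));  // dense_1
        Console.WriteLine(Next(uids, "conv", zeroBased: false));  // conv_1
    }
}
```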
@@ -6,6 +6,13 @@ namespace Tensorflow.Keras
 {
     public class backend
     {
+        /// <summary>
+        /// A global dictionary mapping graph objects to an index of counters used
+        /// for various layer names in each graph.
+        /// Allows giving unique autogenerated names to layers, in a graph-specific way.
+        /// </summary>
+        public static Dictionary<string, Dictionary<(string, string), int>> PER_GRAPH_LAYER_NAME_UIDS = new Dictionary<string, Dictionary<(string, string), int>>();
+
         public static void track_variable(RefVariable v)
         {
@@ -1,34 +1,12 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
+using Tensorflow.Train;

 namespace Tensorflow
 {
-    public abstract class CheckpointableBase
+    public abstract class CheckpointableBase : Trackable
     {
-        /// <summary>
-        /// Restore-on-create for a variable be saved with this `Checkpointable`.
-        /// </summary>
-        /// <returns></returns>
-        protected virtual RefVariable _add_variable_with_custom_getter(string name,
-            int[] shape,
-            TF_DataType dtype = TF_DataType.TF_FLOAT,
-            IInitializer initializer = null,
-            Func<string, int[], TF_DataType, IInitializer, bool, RefVariable> getter = null,
-            bool overwrite = false,
-            bool trainable = false)
-        {
-            var new_variable = getter(name, shape, dtype, initializer, trainable);
-            if (!overwrite || new_variable is RefVariable)
-                return _track_checkpointable(new_variable, name: name,
-                    overwrite: overwrite);
-            else
-                return new_variable;
-        }
-
-        protected RefVariable _track_checkpointable(RefVariable checkpointable, string name, bool overwrite = false)
-        {
-            return checkpointable;
-        }
     }
 }
@@ -0,0 +1,34 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Train
+{
+    public abstract class Trackable
+    {
+        /// <summary>
+        /// Restore-on-create for a variable to be saved with this `Checkpointable`.
+        /// </summary>
+        /// <returns></returns>
+        protected virtual RefVariable _add_variable_with_custom_getter(string name,
+            int[] shape,
+            TF_DataType dtype = TF_DataType.TF_FLOAT,
+            IInitializer initializer = null,
+            Func<string, int[], TF_DataType, IInitializer, bool, RefVariable> getter = null,
+            bool overwrite = false,
+            bool trainable = false)
+        {
+            var new_variable = getter(name, shape, dtype, initializer, trainable);
+            if (!overwrite || new_variable is RefVariable)
+                return _track_checkpointable(new_variable, name: name,
+                    overwrite: overwrite);
+            else
+                return new_variable;
+        }
+
+        protected RefVariable _track_checkpointable(RefVariable checkpointable, string name, bool overwrite = false)
+        {
+            return checkpointable;
+        }
+    }
+}
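`_add_variable_with_custom_getter` is a template method: it delegates the actual variable creation to whatever `getter` delegate the caller supplies, then registers the result via `_track_checkpointable`. A hedged sketch of a conforming getter, reusing the lambda-initializer pattern from `make_variable` above (the `myGetter` name is hypothetical):

```csharp
// Hypothetical example: any delegate with this shape can be passed as `getter`.
Func<string, int[], TF_DataType, IInitializer, bool, RefVariable> myGetter =
    (name, shape, dtype, initializer, trainable) =>
    {
        // Defer initialization through a lambda, as make_variable does above.
        Func<Tensor> init_val = () => initializer.call(new TensorShape(shape), dtype: dtype);
        return tf.Variable(init_val);
    };
```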
@@ -111,7 +111,7 @@ namespace Tensorflow
                 // Store the graph key so optimizers know how to only retrieve variables from
                 // this graph.
-                _graph_key = ops.get_default_graph()._graph_key;
+                _graph_key = ops.get_default_graph().graph_key;

                 _trainable = trainable;
                 if (trainable && !collections.Contains(ops.GraphKeys.TRAINABLE_VARIABLES))
@@ -18,69 +18,14 @@ namespace TensorFlowNET.Examples
         public string Name => "Naive Bayes Classifier";
         public bool ImportGraph { get; set; } = false;

+        public NDArray X, y;
         public Normal dist { get; set; }

         public bool Run()
         {
-            var X = np.array(new float[][] { new float[] { 5.1f, 3.5f},new float[] { 4.9f, 3.0f },new float[] { 4.7f, 3.2f },
-                new float[] { 4.6f, 3.1f },new float[] { 5.0f, 3.6f },new float[] { 5.4f, 3.9f },
-                new float[] { 4.6f, 3.4f },new float[] { 5.0f, 3.4f },new float[] { 4.4f, 2.9f },
-                new float[] { 4.9f, 3.1f },new float[] { 5.4f, 3.7f },new float[] {4.8f, 3.4f },
-                new float[] {4.8f, 3.0f },new float[] {4.3f, 3.0f },new float[] {5.8f, 4.0f },
-                new float[] {5.7f, 4.4f },new float[] {5.4f, 3.9f },new float[] {5.1f, 3.5f },
-                new float[] {5.7f, 3.8f },new float[] {5.1f, 3.8f },new float[] {5.4f, 3.4f },
-                new float[] {5.1f, 3.7f },new float[] {5.1f, 3.3f },new float[] {4.8f, 3.4f },
-                new float[] {5.0f, 3.0f },new float[] {5.0f , 3.4f },new float[] {5.2f, 3.5f },
-                new float[] {5.2f, 3.4f },new float[] {4.7f, 3.2f },new float[] {4.8f, 3.1f },
-                new float[] {5.4f, 3.4f },new float[] {5.2f, 4.1f},new float[] {5.5f, 4.2f },
-                new float[] {4.9f, 3.1f },new float[] {5.0f , 3.2f },new float[] {5.5f, 3.5f },
-                new float[] {4.9f, 3.6f },new float[] {4.4f, 3.0f },new float[] {5.1f, 3.4f },
-                new float[] {5.0f , 3.5f },new float[] {4.5f, 2.3f },new float[] {4.4f, 3.2f },
-                new float[] {5.0f , 3.5f },new float[] {5.1f, 3.8f },new float[] {4.8f, 3.0f},
-                new float[] {5.1f, 3.8f },new float[] {4.6f, 3.2f },new float[] { 5.3f, 3.7f },
-                new float[] {5.0f , 3.3f },new float[] {7.0f , 3.2f },new float[] {6.4f, 3.2f },
-                new float[] {6.9f, 3.1f },new float[] {5.5f, 2.3f },new float[] {6.5f, 2.8f },
-                new float[] {5.7f, 2.8f },new float[] {6.3f, 3.3f },new float[] {4.9f, 2.4f },
-                new float[] {6.6f, 2.9f },new float[] {5.2f, 2.7f },new float[] {5.0f , 2.0f },
-                new float[] {5.9f, 3.0f },new float[] {6.0f , 2.2f },new float[] {6.1f, 2.9f },
-                new float[] {5.6f, 2.9f },new float[] {6.7f, 3.1f },new float[] {5.6f, 3.0f },
-                new float[] {5.8f, 2.7f },new float[] {6.2f, 2.2f },new float[] {5.6f, 2.5f },
-                new float[] {5.9f, 3.0f},new float[] {6.1f, 2.8f},new float[] {6.3f, 2.5f},
-                new float[] {6.1f, 2.8f},new float[] {6.4f, 2.9f},new float[] {6.6f, 3.0f },
-                new float[] {6.8f, 2.8f},new float[] {6.7f, 3.0f },new float[] {6.0f , 2.9f},
-                new float[] {5.7f, 2.6f},new float[] {5.5f, 2.4f},new float[] {5.5f, 2.4f},
-                new float[] {5.8f, 2.7f},new float[] {6.0f , 2.7f},new float[] {5.4f, 3.0f},
-                new float[] {6.0f , 3.4f},new float[] {6.7f, 3.1f},new float[] {6.3f, 2.3f},
-                new float[] {5.6f, 3.0f },new float[] {5.5f, 2.5f},new float[] {5.5f, 2.6f},
-                new float[] {6.1f, 3.0f },new float[] {5.8f, 2.6f},new float[] {5.0f, 2.3f},
-                new float[] {5.6f, 2.7f},new float[] {5.7f, 3.0f },new float[] {5.7f, 2.9f},
-                new float[] {6.2f, 2.9f},new float[] {5.1f, 2.5f},new float[] {5.7f, 2.8f},
-                new float[] {6.3f, 3.3f},new float[] {5.8f, 2.7f},new float[] {7.1f, 3.0f },
-                new float[] {6.3f, 2.9f},new float[] {6.5f, 3.0f },new float[] {7.6f, 3.0f },
-                new float[] {4.9f, 2.5f},new float[] {7.3f, 2.9f},new float[] {6.7f, 2.5f},
-                new float[] {7.2f, 3.6f},new float[] {6.5f, 3.2f},new float[] {6.4f, 2.7f},
-                new float[] {6.8f, 3.00f },new float[] {5.7f, 2.5f},new float[] {5.8f, 2.8f},
-                new float[] {6.4f, 3.2f},new float[] {6.5f, 3.0f },new float[] {7.7f, 3.8f},
-                new float[] {7.7f, 2.6f},new float[] {6.0f , 2.2f},new float[] {6.9f, 3.2f},
-                new float[] {5.6f, 2.8f},new float[] {7.7f, 2.8f},new float[] {6.3f, 2.7f},
-                new float[] {6.7f, 3.3f},new float[] {7.2f, 3.2f},new float[] {6.2f, 2.8f},
-                new float[] {6.1f, 3.0f },new float[] {6.4f, 2.8f},new float[] {7.2f, 3.0f },
-                new float[] {7.4f, 2.8f},new float[] {7.9f, 3.8f},new float[] {6.4f, 2.8f},
-                new float[] {6.3f, 2.8f},new float[] {6.1f, 2.6f},new float[] {7.7f, 3.0f },
-                new float[] {6.3f, 3.4f},new float[] {6.4f, 3.1f},new float[] {6.0f, 3.0f},
-                new float[] {6.9f, 3.1f},new float[] {6.7f, 3.1f},new float[] {6.9f, 3.1f},
-                new float[] {5.8f, 2.7f},new float[] {6.8f, 3.2f},new float[] {6.7f, 3.3f},
-                new float[] {6.7f, 3.0f },new float[] {6.3f, 2.5f},new float[] {6.5f, 3.0f },
-                new float[] {6.2f, 3.4f},new float[] {5.9f, 3.0f }, new float[] {5.8f, 3.0f }});
-            var y = np.array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-                0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
-                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
-                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2);
+            PrepareData();
             fit(X, y);

             // Create a regular grid and classify each point
             float x_min = X.amin(0).Data<float>(0) - 0.5f;
             float y_min = X.amin(0).Data<float>(1) - 0.5f;
@@ -88,12 +33,11 @@ namespace TensorFlowNET.Examples
             float y_max = X.amax(0).Data<float>(1) + 0.5f;

             var (xx, yy) = np.meshgrid(np.linspace(x_min, x_max, 30), np.linspace(y_min, y_max, 30));
-            var s = tf.Session();
-            if (xx.dtype == typeof(float))
+            with(tf.Session(), sess =>
             {
-                var samples = np.hstack<float>(xx.ravel().reshape(xx.size,1), yy.ravel().reshape(yy.size,1));
-                var Z = s.run(predict(samples));
-            }
+                var samples = np.hstack<float>(xx.ravel().reshape(xx.size, 1), yy.ravel().reshape(yy.size, 1));
+                var Z = sess.run(predict(samples));
+            });

             return true;
         }
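Switching from `var s = tf.Session();` to `with(tf.Session(), sess => ...)` scopes the session to the lambda instead of leaving it open after `Run` returns. Assuming `Session` implements `IDisposable`, the pattern is roughly equivalent to:

```csharp
// Roughly equivalent plain-C# form (assuming Session implements IDisposable):
using (var sess = tf.Session())
{
    var samples = np.hstack<float>(xx.ravel().reshape(xx.size, 1), yy.ravel().reshape(yy.size, 1));
    var Z = sess.run(predict(samples));
} // the session is released here even if run() throws
```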
@@ -189,7 +133,42 @@ namespace TensorFlowNET.Examples
         public void PrepareData()
         {
+            #region Training data
+            X = np.array(new float[,] {
+                {5.1f, 3.5f}, {4.9f, 3.0f}, {4.7f, 3.2f}, {4.6f, 3.1f}, {5.0f, 3.6f}, {5.4f, 3.9f},
+                {4.6f, 3.4f}, {5.0f, 3.4f}, {4.4f, 2.9f}, {4.9f, 3.1f}, {5.4f, 3.7f}, {4.8f, 3.4f},
+                {4.8f, 3.0f}, {4.3f, 3.0f}, {5.8f, 4.0f}, {5.7f, 4.4f}, {5.4f, 3.9f}, {5.1f, 3.5f},
+                {5.7f, 3.8f}, {5.1f, 3.8f}, {5.4f, 3.4f}, {5.1f, 3.7f}, {5.1f, 3.3f}, {4.8f, 3.4f},
+                {5.0f, 3.0f}, {5.0f, 3.4f}, {5.2f, 3.5f}, {5.2f, 3.4f}, {4.7f, 3.2f}, {4.8f, 3.1f},
+                {5.4f, 3.4f}, {5.2f, 4.1f}, {5.5f, 4.2f}, {4.9f, 3.1f}, {5.0f, 3.2f}, {5.5f, 3.5f},
+                {4.9f, 3.6f}, {4.4f, 3.0f}, {5.1f, 3.4f}, {5.0f, 3.5f}, {4.5f, 2.3f}, {4.4f, 3.2f},
+                {5.0f, 3.5f}, {5.1f, 3.8f}, {4.8f, 3.0f}, {5.1f, 3.8f}, {4.6f, 3.2f}, {5.3f, 3.7f},
+                {5.0f, 3.3f}, {7.0f, 3.2f}, {6.4f, 3.2f}, {6.9f, 3.1f}, {5.5f, 2.3f}, {6.5f, 2.8f},
+                {5.7f, 2.8f}, {6.3f, 3.3f}, {4.9f, 2.4f}, {6.6f, 2.9f}, {5.2f, 2.7f}, {5.0f, 2.0f},
+                {5.9f, 3.0f}, {6.0f, 2.2f}, {6.1f, 2.9f}, {5.6f, 2.9f}, {6.7f, 3.1f}, {5.6f, 3.0f},
+                {5.8f, 2.7f}, {6.2f, 2.2f}, {5.6f, 2.5f}, {5.9f, 3.0f}, {6.1f, 2.8f}, {6.3f, 2.5f},
+                {6.1f, 2.8f}, {6.4f, 2.9f}, {6.6f, 3.0f}, {6.8f, 2.8f}, {6.7f, 3.0f}, {6.0f, 2.9f},
+                {5.7f, 2.6f}, {5.5f, 2.4f}, {5.5f, 2.4f}, {5.8f, 2.7f}, {6.0f, 2.7f}, {5.4f, 3.0f},
+                {6.0f, 3.4f}, {6.7f, 3.1f}, {6.3f, 2.3f}, {5.6f, 3.0f}, {5.5f, 2.5f}, {5.5f, 2.6f},
+                {6.1f, 3.0f}, {5.8f, 2.6f}, {5.0f, 2.3f}, {5.6f, 2.7f}, {5.7f, 3.0f}, {5.7f, 2.9f},
+                {6.2f, 2.9f}, {5.1f, 2.5f}, {5.7f, 2.8f}, {6.3f, 3.3f}, {5.8f, 2.7f}, {7.1f, 3.0f},
+                {6.3f, 2.9f}, {6.5f, 3.0f}, {7.6f, 3.0f}, {4.9f, 2.5f}, {7.3f, 2.9f}, {6.7f, 2.5f},
+                {7.2f, 3.6f}, {6.5f, 3.2f}, {6.4f, 2.7f}, {6.8f, 3.0f}, {5.7f, 2.5f}, {5.8f, 2.8f},
+                {6.4f, 3.2f}, {6.5f, 3.0f}, {7.7f, 3.8f}, {7.7f, 2.6f}, {6.0f, 2.2f}, {6.9f, 3.2f},
+                {5.6f, 2.8f}, {7.7f, 2.8f}, {6.3f, 2.7f}, {6.7f, 3.3f}, {7.2f, 3.2f}, {6.2f, 2.8f},
+                {6.1f, 3.0f}, {6.4f, 2.8f}, {7.2f, 3.0f}, {7.4f, 2.8f}, {7.9f, 3.8f}, {6.4f, 2.8f},
+                {6.3f, 2.8f}, {6.1f, 2.6f}, {7.7f, 3.0f}, {6.3f, 3.4f}, {6.4f, 3.1f}, {6.0f, 3.0f},
+                {6.9f, 3.1f}, {6.7f, 3.1f}, {6.9f, 3.1f}, {5.8f, 2.7f}, {6.8f, 3.2f}, {6.7f, 3.3f},
+                {6.7f, 3.0f}, {6.3f, 2.5f}, {6.5f, 3.0f}, {6.2f, 3.4f}, {5.9f, 3.0f}, {5.8f, 3.0f}});
+
+            y = np.array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+                0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
+                2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2);
+            #endregion
         }
     }
 }
@@ -30,12 +30,12 @@ namespace TensorFlowNET.Examples
         {
             PrepareData();

-            Console.WriteLine($"Training entries: {train_data.size}, labels: {train_labels.size}");
+            Console.WriteLine($"Training entries: {train_data.len}, labels: {train_labels.len}");

             // A dictionary mapping words to an integer index
             var word_index = GetWordIndex();

-            train_data = keras.preprocessing.sequence.pad_sequences(train_data,
+            /*train_data = keras.preprocessing.sequence.pad_sequences(train_data,
                 value: word_index["<PAD>"],
                 padding: "post",
                 maxlen: 256);
@@ -43,13 +43,14 @@ namespace TensorFlowNET.Examples
             test_data = keras.preprocessing.sequence.pad_sequences(test_data,
                 value: word_index["<PAD>"],
                 padding: "post",
-                maxlen: 256);
+                maxlen: 256);*/

             // input shape is the vocabulary count used for the movie reviews (10,000 words)
             int vocab_size = 10000;

             var model = keras.Sequential();
-            model.add(keras.layers.Embedding(vocab_size, 16));
+            var layer = keras.layers.Embedding(vocab_size, 16);
+            model.add(layer);

             return false;
         }
@@ -78,17 +79,23 @@ namespace TensorFlowNET.Examples
             labels_test = labels_test[indices_test];

             // not completed
-            var xs = x_train.hstack<int>(x_test);
+            var xs = x_train.hstack<string>(x_test);
             var labels = labels_train.hstack<int>(labels_test);

             var idx = x_train.size;
             var y_train = labels_train;
             var y_test = labels_test;

-            x_train = train_data;
-            train_labels = y_train;
+            // convert x_train: each row is a comma-separated string of word ids
+            train_data = new NDArray(np.int32, (x_train.size, 256));
+            for (int i = 0; i < x_train.size; i++)
+                train_data[i] = x_train[i].Data<string>(0).Split(',').Select(x => int.Parse(x)).ToArray();

-            test_data = x_test;
+            test_data = new NDArray(np.int32, (x_test.size, 256));
+            for (int i = 0; i < x_test.size; i++)
+                test_data[i] = x_test[i].Data<string>(0).Split(',').Select(x => int.Parse(x)).ToArray();

+            train_labels = y_train;
             test_labels = y_test;
         }
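Each row of `x_train`/`x_test` holds one review as a single comma-separated string of word ids; the loops above parse every row into a 256-wide int32 row of the target NDArray. The per-row conversion in isolation (plain .NET, illustrative input):

```csharp
using System.Linq;

// One row of the raw data is a comma-separated id string...
string row = "12,5,991,0,0";
// ...which the loop turns into an int[] before assigning it to train_data[i].
int[] ids = row.Split(',').Select(int.Parse).ToArray(); // {12, 5, 991, 0, 0}
```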