@@ -11,6 +11,8 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tensorflow.UnitTest", "test
 EndProject
 Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "TensorFlowNET.Console", "src\TensorFlowNET.Console\TensorFlowNET.Console.csproj", "{03F06299-3F4B-4449-A709-3A647657BC0C}"
 EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Tensorflow.Keras", "src\TensorFlowNET.Keras\Tensorflow.Keras.csproj", "{49D71826-C03D-4FA7-9BAC-22C1327E65CF}"
+EndProject
 Global
 GlobalSection(SolutionConfigurationPlatforms) = preSolution
 Debug|Any CPU = Debug|Any CPU
@@ -123,6 +125,30 @@ Global
 {03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x64.Build.0 = Release|Any CPU
 {03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x86.ActiveCfg = Release|Any CPU
 {03F06299-3F4B-4449-A709-3A647657BC0C}.Release|x86.Build.0 = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|Any CPU.Build.0 = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x64.ActiveCfg = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x64.Build.0 = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x86.ActiveCfg = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug|x86.Build.0 = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|Any CPU.ActiveCfg = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|Any CPU.Build.0 = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x64.ActiveCfg = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x64.Build.0 = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x86.ActiveCfg = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Debug-Minimal|x86.Build.0 = Debug|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|Any CPU.ActiveCfg = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|Any CPU.Build.0 = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x64.ActiveCfg = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x64.Build.0 = Debug|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x86.ActiveCfg = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Publish|x86.Build.0 = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|Any CPU.ActiveCfg = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|Any CPU.Build.0 = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x64.ActiveCfg = Release|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x64.Build.0 = Release|x64
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x86.ActiveCfg = Release|Any CPU
+{49D71826-C03D-4FA7-9BAC-22C1327E65CF}.Release|x86.Build.0 = Release|Any CPU
 EndGlobalSection
 GlobalSection(SolutionProperties) = preSolution
 HideSolutionNode = FALSE
@@ -1,5 +1,4 @@
 using System;
-using static Tensorflow.Binding;
 namespace Tensorflow
 {
@@ -12,7 +12,7 @@
 </ItemGroup>
 <ItemGroup>
-<ProjectReference Include="..\TensorFlowNET.Core\Tensorflow.Binding.csproj" />
+<ProjectReference Include="..\TensorFlowNET.Keras\Tensorflow.Keras.csproj" />
 </ItemGroup>
 </Project>
@@ -137,8 +137,6 @@ namespace Tensorflow
 is_training: is_training,
 name: name);
-public IPoolFunction max_pool_fn => new MaxPoolFunction();
 public Tensor max_pool(Tensor value, int[] ksize, int[] strides, string padding, string data_format = "NHWC", string name = null)
 => nn_ops.max_pool(value, ksize, strides, padding, data_format: data_format, name: name);
@@ -1,42 +0,0 @@
-/*****************************************************************************
-Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-******************************************************************************/
-using Tensorflow.Keras.Optimizers;
-namespace Tensorflow
-{
-public partial class tensorflow
-{
-public KerasOptimizers optimizers => new KerasOptimizers();
-public class KerasOptimizers
-{
-public SGD SGD(float learning_rate) => new SGD(learning_rate);
-public Adam Adam(float learning_rate = 0.001f,
-float beta_1 = 0.9f,
-float beta_2 = 0.999f,
-float epsilon = 1e-7f,
-bool amsgrad = false,
-string name = "Adam") => new Adam(learning_rate: learning_rate,
-beta_1: beta_1,
-beta_2: beta_2,
-epsilon: epsilon,
-amsgrad: amsgrad,
-name: name);
-}
-}
-}
@@ -15,7 +15,6 @@
 ******************************************************************************/
 using System.Collections.Generic;
-using Tensorflow.Keras.Optimizers;
 using Tensorflow.Train;
 namespace Tensorflow
@@ -87,7 +86,7 @@ namespace Tensorflow
 public CheckpointState get_checkpoint_state(string checkpoint_dir, string latest_filename = null)
 => checkpoint_management.get_checkpoint_state(checkpoint_dir, latest_filename: latest_filename);
-public Tensor polynomial_decay(float learning_rate,
+/*public Tensor polynomial_decay(float learning_rate,
 RefVariable global_step,
 float decay_steps,
 float end_learning_rate = 0.0001f,
@@ -105,7 +104,7 @@ namespace Tensorflow
 var decayed_lr = decayed.__call__(global_step);
 return decayed_lr;
-}
+}*/
 }
 }
 }
@@ -14,7 +14,7 @@ namespace Tensorflow.Keras.ArgsDefinition
 public int MaxQueueSize { get; set; } = 10;
 public int Workers { get; set; } = 1;
 public bool UseMultiprocessing { get; set; } = false;
-public Model Model { get; set; }
+public IModel Model { get; set; }
 public IVariableV1 StepsPerExecution { get; set; }
 }
 }
@@ -4,7 +4,7 @@ namespace Tensorflow.Keras.ArgsDefinition
 {
 public class NodeArgs
 {
-public Layer[] InboundLayers { get; set; }
+public ILayer[] InboundLayers { get; set; }
 public int[] NodeIndices { get; set; }
 public int[] TensorIndices { get; set; }
 public Tensors InputTensors { get; set; }
@@ -5,6 +5,6 @@ namespace Tensorflow.Keras.ArgsDefinition
 {
 public class SequentialArgs : ModelArgs
 {
-public List<Layer> Layers { get; set; }
+public List<ILayer> Layers { get; set; }
 }
 }
@@ -13,6 +13,6 @@ namespace Tensorflow.Keras.ArgsDefinition
 public int MaxQueueSize { get; set; }
 public int Worker { get; set; }
 public bool UseMultiprocessing { get; set; }
-public Model Model { get; set; }
+public IModel Model { get; set; }
 }
 }
@@ -1,74 +0,0 @@
-/*****************************************************************************
-Copyright 2020 Haiping Chen. All Rights Reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-******************************************************************************/
-using NumSharp;
-using System;
-using System.IO;
-using System.Net;
-namespace Tensorflow.Keras.Datasets
-{
-public class Mnist
-{
-string origin_folder = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/";
-string file_name = "mnist.npz";
-/// <summary>
-/// Loads the [MNIST dataset](http://yann.lecun.com/exdb/mnist/).
-/// </summary>
-/// <returns></returns>
-public DatasetPass load_data()
-{
-var file = Download();
-var bytes = File.ReadAllBytes(file);
-var datax = LoadX(bytes);
-var datay = LoadY(bytes);
-return new DatasetPass
-{
-Train = (datax.Item1, datay.Item1),
-Test = (datax.Item2, datay.Item2)
-};
-}
-(NDArray, NDArray) LoadX(byte[] bytes)
-{
-var y = np.Load_Npz<byte[,,]>(bytes);
-return (y["x_train.npy"], y["x_test.npy"]);
-}
-(NDArray, NDArray) LoadY(byte[] bytes)
-{
-var y = np.Load_Npz<byte[]>(bytes);
-return (y["y_train.npy"], y["y_test.npy"]);
-}
-string Download()
-{
-var fileSaveTo = Path.Combine(Path.GetTempPath(), file_name);
-if (File.Exists(fileSaveTo))
-{
-Console.WriteLine($"The file {fileSaveTo} already exists");
-return fileSaveTo;
-}
-using var wc = new WebClient();
-wc.DownloadFileTaskAsync(origin_folder + file_name, fileSaveTo).Wait();
-return fileSaveTo;
-}
-}
-}
@@ -1,10 +0,0 @@
-namespace Tensorflow.Keras.Engine
-{
-public class CallContext
-{
-public CallContextManager enter()
-{
-return new CallContextManager();
-}
-}
-}
@@ -4,7 +4,7 @@ using System.Text;
 namespace Tensorflow.Keras.Engine
 {
-class InputSpec
+public interface IModel
 {
 }
 }
@@ -0,0 +1,16 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+namespace Tensorflow.Keras.Engine
+{
+public interface INode
+{
+Tensors input_tensors { get; }
+Tensors Outputs { get; }
+ILayer Layer { get; set; }
+List<Tensor> KerasInputs { get; set; }
+INode[] ParentNodes { get; }
+IEnumerable<(ILayer, int, int, Tensor)> iterate_inbound();
+}
+}
@@ -5,12 +5,13 @@
 /// </summary>
 public class KerasHistory
 {
-Layer layer;
+ILayer layer;
+public ILayer Layer => layer;
 int node_index;
 int tensor_index;
 Tensor tensor;
-public KerasHistory(Layer layer, int node_index, int tensor_index, Tensor tensor)
+public KerasHistory(ILayer layer, int node_index, int tensor_index, Tensor tensor)
 {
 this.layer = layer;
 this.node_index = node_index;
@@ -18,7 +19,7 @@
 this.tensor = tensor;
 }
-public void Deconstruct(out Layer layer, out int node_index, out int tensor_index)
+public void Deconstruct(out ILayer layer, out int node_index, out int tensor_index)
 {
 layer = this.layer;
 node_index = this.node_index;
@@ -27,8 +28,5 @@
 public override string ToString()
 => $"{layer.GetType().Name} {layer.Name} {tensor.name}";
-public static implicit operator Layer(KerasHistory history)
-=> history.layer;
 }
 }
@@ -1,121 +0,0 @@
-/*****************************************************************************
-Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-******************************************************************************/
-using System.Collections.Generic;
-using System.Linq;
-using Tensorflow.Keras.ArgsDefinition;
-using static Tensorflow.Binding;
-namespace Tensorflow.Keras.Engine
-{
-/// <summary>
-/// A `Node` describes the connectivity between two layers.
-///
-/// Each time a layer is connected to some new input,
-/// a node is added to `layer._inbound_nodes`.
-/// Each time the output of a layer is used by another layer,
-/// a node is added to `layer._outbound_nodes`.
-/// </summary>
-public partial class Node
-{
-NodeArgs args;
-public int[] node_indices;
-public int[] tensor_indices;
-public Tensors input_tensors => args.InputTensors;
-public Tensors Outputs => args.Outputs;
-public TensorShape[] input_shapes;
-public TensorShape[] output_shapes;
-public List<Tensor> KerasInputs = new List<Tensor>();
-public Layer Layer { get; set; }
-public bool IsInput => args.InputTensors == null;
-public int[] FlatInputIds { get; set; }
-public int[] FlatOutputIds { get; set; }
-bool _single_positional_tensor_passed => KerasInputs.Count() == 1;
-Dictionary<int, int> _keras_inputs_ids_and_indices = new Dictionary<int, int>();
-public Node[] ParentNodes
-{
-get
-{
-var node_deps = new List<Node>();
-foreach (var kt in KerasInputs)
-{
-var (layer, node_index, _) = kt.KerasHistory;
-if (layer != null)
-node_deps.append(layer.InboundNodes[node_index]);
-}
-return node_deps.ToArray();
-}
-}
-public Node(Layer layer, NodeArgs args)
-{
-this.args = args;
-this.Layer = layer;
-if (args.InputTensors != null)
-KerasInputs.AddRange(args.InputTensors);
-foreach (var (i, ele) in enumerate(KerasInputs))
-_keras_inputs_ids_and_indices[i] = ele.GetHashCode();
-// Wire up Node to Layers.
-layer.InboundNodes.Add(this);
-foreach (var kt in KerasInputs)
-{
-if (kt.KerasHistory == null)
-continue;
-var (inbound_layer, _, _) = kt.KerasHistory;
-if (inbound_layer != null)
-inbound_layer.OutboundNodes.Add(this);
-}
-// Set metadata on outputs.
-var node_index = layer.InboundNodes.Count - 1;
-foreach (var (i, tensor) in enumerate(Outputs))
-tensor.KerasHistory = new KerasHistory(layer, node_index, i, tensor);
-// Cached for performance.
-FlatInputIds = KerasInputs.Select(x => x.GetHashCode()).ToArray();
-FlatOutputIds = Outputs.Select(x => x.GetHashCode()).ToArray();
-}
-/// <summary>
-/// Maps Keras Tensors to computed Tensors using `tensor_dict`.
-/// </summary>
-/// <param name="tensor_dict"></param>
-/// <returns></returns>
-public Tensors MapArguments(Dictionary<int, Queue<Tensor>> tensor_dict)
-{
-if (_single_positional_tensor_passed)
-{
-var kt_id = _keras_inputs_ids_and_indices[0];
-return tensor_dict[kt_id].Dequeue();
-}
-else
-{
-var flat_arguments = KerasInputs.Select(x => x).ToArray();
-foreach (var (kt_index, kt_id) in enumerate(_keras_inputs_ids_and_indices))
-flat_arguments[kt_index] = tensor_dict[kt_id].Dequeue();
-return flat_arguments;
-}
-}
-public override string ToString()
-=> $"{Layer.Name}, {KerasInputs.Count} inputs: {string.Join(",", KerasInputs.Select(x => x.name))}";
-}
-}
@@ -1,134 +0,0 @@
-/*****************************************************************************
-Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-******************************************************************************/
-using System.Collections.Generic;
-using Tensorflow.Keras.ArgsDefinition;
-using Tensorflow.Keras.Layers;
-using static Tensorflow.Binding;
-namespace Tensorflow.Keras.Engine
-{
-/// <summary>
-/// `Sequential` groups a linear stack of layers into a `tf.keras.Model`.
-/// `Sequential` provides training and inference features on this model.
-/// </summary>
-public class Sequential : Model
-{
-SequentialArgs args;
-bool _is_graph_network;
-Tensor inputs;
-Tensor outputs;
-bool computeOutputAndMaskJointly;
-bool autoTrackSubLayers;
-TensorShape inferredInputShape;
-bool hasExplicitInputShape;
-TF_DataType inputDType;
-List<Layer> layers => args.Layers;
-public TensorShape output_shape => outputs.TensorShape;
-bool built = false;
-public Sequential(SequentialArgs args)
-: base(new ModelArgs
-{
-Name = args.Name
-})
-{
-this.args = args;
-if (args.Layers == null)
-args.Layers = new List<Layer>();
-// SupportsMasking = true;
-computeOutputAndMaskJointly = true;
-autoTrackSubLayers = false;
-hasExplicitInputShape = false;
-_is_graph_network = false;
-}
-public void add(Tensor tensor)
-{
-Layer layer = tensor.KerasHistory;
-add(layer);
-}
-/// <summary>
-/// Adds a layer instance on top of the layer stack.
-/// </summary>
-/// <param name="layer"></param>
-public void add(Layer layer)
-{
-built = false;
-var set_inputs = false;
-if (layers.Count == 0)
-{
-if (layer is InputLayer)
-{
-set_inputs = true;
-}
-else
-{
-if (layer.BatchInputShape != null)
-{
-// Instantiate an input layer.
-var x = tf.keras.Input(
-shape: layer.BatchInputShape,
-dtype: layer.DType,
-name: layer.Name + "_input");
-// This will build the current layer
-// and create the node connecting the current layer
-// to the input layer we just created.
-layer.Apply(x);
-set_inputs = true;
-}
-}
-if (set_inputs)
-{
-// If an input layer (placeholder) is available.
-outputs = layer.InboundNodes[^1].Outputs;
-}
-}
-else if (outputs != null)
-{
-outputs = layer.Apply(outputs);
-}
-if (set_inputs || _is_graph_network)
-{
-_init_graph_network(inputs, outputs);
-}
-else
-{
-}
-}
-void _init_graph_network(Tensor inputs, Tensor outputs)
-{
-_is_graph_network = true;
-this.inputs = inputs;
-this.outputs = outputs;
-built = true;
-_map_graph_network(inputs, outputs);
-}
-void _map_graph_network(Tensor inputs, Tensor outputs)
-{
-layers.add(outputs.KerasHistory);
-}
-}
-}
@@ -0,0 +1,20 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.Engine;
+namespace Tensorflow.Keras
+{
+public interface ILayer
+{
+string Name { get; }
+bool Trainable { get; }
+List<ILayer> Layers { get; }
+List<INode> InboundNodes { get; }
+List<INode> OutboundNodes { get; }
+Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false);
+List<IVariableV1> trainable_variables { get; }
+TensorShape output_shape { get; }
+int count_params();
+}
+}
@@ -1,41 +0,0 @@
-using System;
-using Tensorflow.Keras.Utils;
-namespace Tensorflow.Keras.Losses
-{
-/// <summary>
-/// Loss base class.
-/// </summary>
-public abstract class Loss
-{
-protected string reduction;
-protected string name;
-bool _allow_sum_over_batch_size;
-string _name_scope;
-public string Reduction => reduction;
-public Loss(string reduction = ReductionV2.AUTO, string name = null)
-{
-this.reduction = reduction;
-this.name = name;
-_allow_sum_over_batch_size = false;
-}
-public virtual Tensor Apply(Tensor y_true, Tensor y_pred, bool from_logits = false, int axis = -1)
-{
-throw new NotImplementedException("");
-}
-public Tensor Call(Tensor y_true, Tensor y_pred)
-{
-var losses = Apply(y_true, y_pred);
-return losses_utils.compute_weighted_loss(losses, reduction: ReductionV2.SUM_OVER_BATCH_SIZE);
-}
-void _set_name_scope()
-{
-_name_scope = name;
-}
-}
-}
@@ -1,12 +0,0 @@
-namespace Tensorflow.Keras.Losses
-{
-public class LossFunctionWrapper : Loss
-{
-public LossFunctionWrapper(string reduction = ReductionV2.AUTO,
-string name = null)
-: base(reduction: reduction,
-name: name)
-{
-}
-}
-}
@@ -1,33 +0,0 @@
-using static Tensorflow.Binding;
-namespace Tensorflow.Keras.Losses
-{
-public class SparseCategoricalCrossentropy : LossFunctionWrapper, ILossFunc
-{
-public SparseCategoricalCrossentropy(bool from_logits = false,
-string reduction = ReductionV2.AUTO,
-string name = "sparse_categorical_crossentropy") :
-base(reduction: reduction,
-name: name)
-{
-}
-public override Tensor Apply(Tensor target, Tensor output, bool from_logits = false, int axis = -1)
-{
-target = tf.cast(target, dtype: TF_DataType.TF_INT64);
-// Try to adjust the shape so that rank of labels = rank of logits - 1.
-var output_shape = array_ops.shape_v2(output);
-var output_rank = output.TensorShape.ndim;
-var target_rank = target.TensorShape.ndim;
-var update_shape = target_rank != output_rank - 1;
-if (update_shape)
-{
-target = array_ops.reshape(target, new int[] { -1 });
-output = array_ops.reshape(output, new int[] { -1, output_shape[-1].numpy() });
-}
-return tf.nn.sparse_softmax_cross_entropy_with_logits(target, output);
-}
-}
-}
@@ -1,14 +0,0 @@
-namespace Tensorflow.Keras.Metrics
-{
-/// <summary>
-/// Computes the (weighted) mean of the given values.
-/// </summary>
-public class Mean : Reduce
-{
-public Mean(string name = "mean", TF_DataType dtype = TF_DataType.TF_FLOAT)
-: base(Reduction.WEIGHTED_MEAN, name, dtype: dtype)
-{
-}
-}
-}
@@ -1,27 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-namespace Tensorflow.Keras.Metrics
-{
-public class MeanMetricWrapper : Mean
-{
-string name;
-Func<Tensor, Tensor, Tensor> _fn = null;
-public MeanMetricWrapper(Func<Tensor, Tensor, Tensor> fn, string name, TF_DataType dtype = TF_DataType.TF_FLOAT)
-: base(name: name, dtype: dtype)
-{
-_fn = fn;
-}
-public override Tensor update_state(Tensor y_true, Tensor y_pred, Tensor sample_weight = null)
-{
-y_true = math_ops.cast(y_true, _dtype);
-y_pred = math_ops.cast(y_pred, _dtype);
-var matches = _fn(y_true, y_pred);
-return update_state(matches, sample_weight: sample_weight);
-}
-}
-}
@@ -1,62 +0,0 @@
-using System;
-using Tensorflow.Keras.ArgsDefinition;
-using Tensorflow.Keras.Engine;
-using static Tensorflow.Binding;
-namespace Tensorflow.Keras.Metrics
-{
-/// <summary>
-/// Encapsulates metric logic and state.
-/// </summary>
-public class Metric : Layer
-{
-protected IVariableV1 total;
-protected IVariableV1 count;
-protected string _reduction;
-protected TF_DataType _dtype;
-public Metric(string name = null, TF_DataType dtype = TF_DataType.DtInvalid)
-: base(new LayerArgs
-{
-Name = name,
-DType = dtype
-})
-{
-stateful = true;
-built = true;
-}
-protected override IVariableV1 add_weight(string name,
-TensorShape shape = null,
-TF_DataType dtype = TF_DataType.TF_FLOAT,
-IInitializer initializer = null,
-IRegularizer regularizer = null,
-VariableSynchronization synchronization = VariableSynchronization.OnRead,
-VariableAggregation aggregation = VariableAggregation.Sum,
-bool trainable = true,
-Func<VariableArgs, IVariableV1> getter = null)
-{
-if (shape == null)
-shape = new TensorShape(new int[0]);
-return tf_with(ops.init_scope(), delegate
-{
-return base.add_weight(name, shape,
-dtype: dtype,
-trainable: false,
-initializer: initializer,
-synchronization: synchronization,
-aggregation: aggregation);
-});
-}
-public virtual Tensor update_state(Tensor y_true, Tensor y_pred, Tensor sample_weight = null)
-=> throw new NotImplementedException("");
-public virtual Tensor result()
-=> throw new NotImplementedException("");
-public override string ToString()
-=> $"{name} {(float)total.numpy()}/{(float)count.numpy()}";
-}
-}
@@ -1,74 +0,0 @@
-using Tensorflow.Keras.Losses;
-using Tensorflow.Keras.Utils;
-using static Tensorflow.Binding;
-namespace Tensorflow.Keras.Metrics
-{
-/// <summary>
-/// Encapsulates metrics that perform a reduce operation on the values.
-/// </summary>
-public class Reduce : Metric
-{
-public Reduce(string reduction, string name, TF_DataType dtype = TF_DataType.DtInvalid)
-: base(name: name, dtype: dtype)
-{
-_reduction = reduction;
-_dtype = dtype;
-total = add_weight("total", initializer: tf.zeros_initializer);
-if (reduction == Reduction.WEIGHTED_MEAN ||
-reduction == Reduction.SUM_OVER_BATCH_SIZE)
-{
-count = add_weight("count", initializer: tf.zeros_initializer);
-}
-}
-public Tensor update_state(Tensor values, Tensor sample_weight = null)
-{
-if (sample_weight != null)
-{
-(values, sample_weight) = losses_utils.squeeze_or_expand_dimensions(
-values, sample_weight: sample_weight);
-sample_weight = math_ops.cast(sample_weight, dtype: values.dtype);
-values = math_ops.multiply(values, sample_weight);
-}
-Tensor update_total_op = null;
-var value_sum = math_ops.reduce_sum(values);
-tf_with(ops.control_dependencies(new[] { value_sum }), ctl =>
-{
-update_total_op = total.assign_add(value_sum);
-});
-// Exit early if the reduction doesn't have a denominator.
-if (_reduction == Reduction.SUM)
-return update_total_op;
-// Update `count` for reductions that require a denominator.
-Tensor num_values = null;
-if (_reduction == Reduction.SUM_OVER_BATCH_SIZE)
-num_values = math_ops.cast(array_ops.size(values), _dtype);
-else if (_reduction == ReductionV2.WEIGHTED_MEAN)
-{
-if (sample_weight == null)
-num_values = math_ops.cast(array_ops.size(values), _dtype);
-else
-num_values = math_ops.reduce_sum(sample_weight);
-}
-return tf_with(ops.control_dependencies(new[] { update_total_op }), ctl
-=> count.assign_add(num_values));
-}
-public override Tensor result()
-{
-if (_reduction == Reduction.SUM)
-return array_ops.identity(total.AsTensor());
-else if (_reduction == Reduction.WEIGHTED_MEAN || _reduction == Reduction.SUM_OVER_BATCH_SIZE)
-return math_ops.div_no_nan(total.AsTensor(), count.AsTensor());
-return base.result();
-}
-}
-}
@@ -1,6 +0,0 @@
-namespace Tensorflow.Keras.Metrics
-{
-class Sum
-{
-}
-}
@@ -1,210 +0,0 @@
-/*****************************************************************************
-Copyright 2018 The TensorFlow.NET Authors. All Rights Reserved.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-******************************************************************************/
-using System;
-using System.Collections.Generic;
-using Tensorflow.Keras.ArgsDefinition;
-using static Tensorflow.Binding;
-namespace Tensorflow.Layers
-{
-public class Layer : Keras.Engine.Layer
-{
-protected Graph _graph;
-protected VariableScope _scope;
-protected VariableScope _current_scope;
-protected bool? _reuse;
-protected bool _use_resource_variables;
-protected bool _keras_style;
-public Layer(bool trainable = true,
-string name = null,
-TF_DataType dtype = TF_DataType.DtInvalid,
-bool? _reuse = null) :
-base(new LayerArgs
-{
-Trainable = trainable,
-Name = name,
-DType = dtype
-})
-{
-// For backwards compatibility, legacy layers do not use `ResourceVariable`
-// by default.
-this._use_resource_variables = false;
-this._reuse = _reuse;
-// Avoid an incorrect lint error
-trainable_weights = new List<IVariableV1>();
-non_trainable_weights = new List<IVariableV1>();
-this.built = false;
-_keras_style = false;
-}
-public virtual (Tensor, Tensor) apply(Tensor inputs, Tensor training = null)
-{
-var results = __call__(inputs, training: training);
-return (results[0], results[1]);
-}
-public Tensors __call__(Tensors inputs,
-Tensor state = null,
-Tensor training = null,
-VariableScope scope = null)
-{
-_set_scope(scope);
-_graph = ops._get_graph_from_inputs(inputs, graph: _graph);
-variable_scope scope_context_manager = null;
-if (built)
-{
-scope_context_manager = tf.variable_scope(_scope,
-reuse: true,
-auxiliary_name_scope: false);
-}
-else
-{
-scope_context_manager = tf.variable_scope(_scope,
-reuse: _reuse,
-auxiliary_name_scope: false);
-}
-Tensors outputs = null;
-tf_with(scope_context_manager, scope2 =>
-{
-_current_scope = scope2;
-// Actually call layer
-outputs = base.Apply(inputs,
-state: state,
-is_training: training == null ? false : false);
-});
-// Update global default collections.
-_add_elements_to_collection(updates.ToArray(), new string[] { tf.GraphKeys.UPDATE_OPS });
-return outputs;
-}
-protected virtual void _add_elements_to_collection(Operation[] elements, string[] collection_list)
-{
-foreach (var name in collection_list)
-{
-var collection = ops.get_collection_ref<Operation>(name);
-foreach (var element in elements)
-if (!collection.Contains(element))
-collection.Add(element);
-}
-}
-/// <summary>
-/// Adds a new variable to the layer, or gets an existing one; returns it.
-/// </summary>
-/// <param name="name"></param>
-/// <param name="shape"></param>
-/// <param name="dtype"></param>
-/// <param name="initializer"></param>
-/// <param name="trainable"></param>
-/// <param name="synchronization"></param>
-/// <param name="aggregation"></param>
-/// <returns></returns>
-protected virtual IVariableV1 add_weight(string name,
-int[] shape,
-TF_DataType dtype = TF_DataType.DtInvalid,
-IInitializer initializer = null,
-bool trainable = true,
-VariableSynchronization synchronization = VariableSynchronization.Auto,
-VariableAggregation aggregation = VariableAggregation.None)
-{
-var default_graph = ops.get_default_graph();
-Graph init_graph = null;
-IVariableV1[] existing_variables = null;
-if (synchronization == VariableSynchronization.OnRead)
-trainable = false;
-if (default_graph.building_function)
-{
-throw new NotImplementedException("add_weight");
-}
-else
-{
-init_graph = default_graph;
-existing_variables = variables.global_variables().ToArray();
-}
-if (dtype == TF_DataType.DtInvalid)
-dtype = TF_DataType.TF_FLOAT;
-_set_scope();
-var reuse = built || (_reuse != null && _reuse.Value);
-return tf_with(tf.variable_scope(_scope,
-reuse: reuse,
-auxiliary_name_scope: false), scope =>
-{
-_current_scope = scope;
-return tf_with(ops.name_scope(_name_scope()), delegate
-{
-var variable = base.add_weight(name,
-shape,
-dtype: dtype,
-initializer: initializer,
-trainable: trainable,
-getter: (args) =>
-tf.compat.v1.get_variable(args.Name,
-shape: args.Shape,
-dtype: args.DType,
-initializer: args.Initializer,
-trainable: args.Trainable)
-);
-//if (init_graph != null)
-//var trainable_variables = variables.trainable_variables();
-return variable;
-});
-});
-}
-protected override string _name_scope()
-{
-return _current_scope.original_name_scope;
-}
-protected void _set_scope(VariableScope scope = null)
-{
-if (_scope == null)
-{
-if (_reuse.HasValue && _reuse.Value)
-{
-throw new NotImplementedException("_set_scope _reuse.HasValue");
-/*with(tf.variable_scope(scope == null ? _base_name : scope),
-captured_scope => _scope = captured_scope);*/
-}
-else
-{
-tf_with(tf.variable_scope(scope, default_name: base_name), captured_scope =>
-{
-// convert variable_scope to VariableScope
-_scope = captured_scope;
-});
-}
-}
-}
-}
-}
@@ -71,7 +71,7 @@ namespace Tensorflow
 /// <param name="training"></param>
 /// <param name="state"></param>
 /// <returns></returns>
-protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+protected Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
 {
 var one = constant_op.constant(1, dtype: dtypes.int32);
 // Parameters of gates are concatenated into one multiply for efficiency.
@@ -66,7 +66,7 @@ namespace Tensorflow
 built = true;
 }
-protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+protected Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
 {
 // Most basic RNN: output = new_state = act(W * input + U * state + B).
 var concat = array_ops.concat(new Tensor[] { inputs, state }, 1);
@@ -22,7 +22,7 @@ using static Tensorflow.Binding;
 namespace Tensorflow.Operations
 {
-internal class ConvolutionInternal
+public class ConvolutionInternal
 {
 ConvolutionalArgs args;
@@ -13,17 +13,165 @@
 See the License for the specific language governing permissions and
 limitations under the License.
 ******************************************************************************/
+using static Tensorflow.Binding;
+using Tensorflow.Keras.Engine;
+using System;
 namespace Tensorflow
 {
 public class LayerRnnCell : RnnCell
 {
-public LayerRnnCell(bool? _reuse = null,
-string name = null,
-TF_DataType dtype = TF_DataType.DtInvalid) : base(_reuse: _reuse,
+protected InputSpec inputSpec;
+protected bool built;
+protected Graph _graph;
+protected VariableScope _scope;
+protected VariableScope _current_scope;
+protected bool? _reuse;
+protected bool _use_resource_variables;
+protected bool _keras_style;
+public LayerRnnCell(bool trainable = true,
+string name = null,
+TF_DataType dtype = TF_DataType.DtInvalid,
+bool? _reuse = null) : base(_reuse: _reuse,
 name: name,
 dtype: dtype)
 {
+// For backwards compatibility, legacy layers do not use `ResourceVariable`
+// by default.
+this._use_resource_variables = false;
+this._reuse = _reuse;
+// Avoid an incorrect lint error
+this.built = false;
+_keras_style = false;
+}
+protected virtual void build(TensorShape inputs_shape)
+{
+}
+public virtual (Tensor, Tensor) apply(Tensor inputs, Tensor training = null)
+{
+var results = __call__(inputs, training: training);
+return (results[0], results[1]);
+}
+public Tensors __call__(Tensors inputs,
+Tensor state = null,
+Tensor training = null,
+VariableScope scope = null)
+{
+_set_scope(scope);
+_graph = ops._get_graph_from_inputs(inputs, graph: _graph);
+variable_scope scope_context_manager = null;
+if (built)
+{
+scope_context_manager = tf.variable_scope(_scope,
+reuse: true,
+auxiliary_name_scope: false);
+}
+else
+{
+scope_context_manager = tf.variable_scope(_scope,
+reuse: _reuse,
+auxiliary_name_scope: false);
+}
+Tensors outputs = null;
+tf_with(scope_context_manager, scope2 =>
+{
+_current_scope = scope2;
+// Actually call layer
+});
+// Update global default collections.
+return outputs;
+}
+protected virtual void _add_elements_to_collection(Operation[] elements, string[] collection_list)
+{
+foreach (var name in collection_list)
+{
+var collection = ops.get_collection_ref<Operation>(name);
+foreach (var element in elements)
+if (!collection.Contains(element))
+collection.Add(element);
+}
+}
+/// <summary>
+/// Adds a new variable to the layer, or gets an existing one; returns it.
+/// </summary>
+/// <param name="name"></param>
+/// <param name="shape"></param>
+/// <param name="dtype"></param>
+/// <param name="initializer"></param>
+/// <param name="trainable"></param>
+/// <param name="synchronization"></param>
+/// <param name="aggregation"></param>
+/// <returns></returns>
+protected virtual IVariableV1 add_weight(string name,
+int[] shape,
+TF_DataType dtype = TF_DataType.DtInvalid,
+IInitializer initializer = null,
+bool trainable = true,
+VariableSynchronization synchronization = VariableSynchronization.Auto,
+VariableAggregation aggregation = VariableAggregation.None)
+{
+var default_graph = ops.get_default_graph();
+Graph init_graph = null;
+IVariableV1[] existing_variables = null;
+if (synchronization == VariableSynchronization.OnRead)
+trainable = false;
+if (default_graph.building_function)
+{
+throw new NotImplementedException("add_weight");
+}
+else
+{
+init_graph = default_graph;
+existing_variables = variables.global_variables().ToArray();
+}
+if (dtype == TF_DataType.DtInvalid)
+dtype = TF_DataType.TF_FLOAT;
+_set_scope();
+var reuse = built || (_reuse != null && _reuse.Value);
+return tf.Variable(0);
+}
+protected string _name_scope()
+{
+return _current_scope.original_name_scope;
+}
+protected void _set_scope(VariableScope scope = null)
+{
+if (_scope == null)
+{
+if (_reuse.HasValue && _reuse.Value)
+{
+throw new NotImplementedException("_set_scope _reuse.HasValue");
+/*with(tf.variable_scope(scope == null ? _base_name : scope),
+captured_scope => _scope = captured_scope);*/
+}
+else
+{
+}
+}
 }
 }
 }
@@ -15,6 +15,9 @@
 ******************************************************************************/
 using System;
+using System.Collections.Generic;
+using Tensorflow.Keras;
+using Tensorflow.Keras.Engine;
 using Tensorflow.Operations;
 using Tensorflow.Util;
 using static Tensorflow.Binding;
@@ -42,7 +45,7 @@ namespace Tensorflow
 /// matching structure of Tensors having shape `[batch_size].concatenate(s)`
 /// for each `s` in `self.batch_size`.
 /// </summary>
-public abstract class RnnCell : Layers.Layer
+public abstract class RnnCell : ILayer
 {
 /// <summary>
 /// Attribute that indicates whether the cell is a TF RNN cell, due the slight
@@ -52,14 +55,24 @@ namespace Tensorflow
 public virtual object state_size { get; }
 public virtual int output_size { get; }
+public string Name { get => throw new NotImplementedException(); set => throw new NotImplementedException(); }
+public List<INode> InboundNodes => throw new NotImplementedException();
+public List<INode> OutboundNodes => throw new NotImplementedException();
+public List<ILayer> Layers => throw new NotImplementedException();
+public bool Trainable => throw new NotImplementedException();
+public List<IVariableV1> trainable_variables => throw new NotImplementedException();
+public TensorShape output_shape => throw new NotImplementedException();
 public RnnCell(bool trainable = true,
 string name = null,
 TF_DataType dtype = TF_DataType.DtInvalid,
-bool? _reuse = null) : base(trainable: trainable,
-name: name,
-dtype: dtype,
-_reuse: _reuse)
+bool? _reuse = null)
 {
 _is_tf_rnn_cell = true;
 }
@@ -109,5 +122,15 @@ namespace Tensorflow
 throw new NotImplementedException("_zero_state_tensors");
 }
+public Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false)
+{
+throw new NotImplementedException();
+}
+public int count_params()
+{
+throw new NotImplementedException();
+}
 }
 }
@@ -363,8 +363,8 @@ namespace Tensorflow.Operations
 Tensor[] outputs = null;
 if (sequence_length != null)
 throw new NotImplementedException("sequence_length != null");
-else
-outputs = cell.__call__(input_t_t, state: state1);
+/*else
+outputs = cell.__call__(input_t_t, state: state1);*/
 var (output, new_state) = (outputs[0], outputs[1]);
 // Keras cells always wrap state as list, even if it's a single tensor.
@@ -24,7 +24,7 @@ namespace Tensorflow
 {
 public class nn_ops
 {
-internal static ConvolutionInternal convolution_internal(string padding,
+public static ConvolutionInternal convolution_internal(string padding,
 int[] strides,
 int[] dilation_rate,
 string name = null,
@@ -87,8 +87,4 @@ TensorFlow .NET v0.30 is focused on making more Keras API work including:
 <PackageReference Include="NumSharp.Lite" Version="0.1.9" />
 <PackageReference Include="Protobuf.Text" Version="0.4.0" />
 </ItemGroup>
-<ItemGroup>
-<Folder Include="Keras\Initializers\" />
-</ItemGroup>
 </Project>
@@ -1,10 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-namespace Tensorflow.Keras
-{
-class Activations
-{
-}
-}
@@ -1,35 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-namespace Tensorflow.Keras.Applications
-{
-public class Densenet
-{
-public static Tensor dense_block(Tensor x, int blocks, string name) => throw new NotImplementedException();
-public static Tensor transition_block(Tensor x, float reduction, string name) => throw new NotImplementedException();
-public static Tensor conv_block(Tensor x, float growth_rate, string name) => throw new NotImplementedException();
-public static Model DenseNet(int blocks, bool include_top=true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model DenseNet121(int blocks, bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model DenseNet169(int blocks, bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model DenseNet201(int blocks, bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();
-public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
-}
-}
@@ -1,60 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-namespace Tensorflow.Keras.Applications
-{
-public class BlockArg
-{
-}
-public class Efficientnet
-{
-public static Model EfficientNet(float width_coefficient, float depth_coefficient, int default_size, float dropout_rate = 0.2f,
-float drop_connect_rate = 0.2f, int depth_divisor = 8, string activation = "swish",
-BlockArg[] blocks_args = null, string model_name = "efficientnet", bool include_top = true,
-string weights = "imagenet", Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Tensor block(Tensor inputs, string activation= "swish", float drop_rate= 0f,string name= "",
-int filters_in= 32, int filters_out= 16, int kernel_size= 3, int strides= 1,
-int expand_ratio= 1, float se_ratio= 0, bool id_skip= true) => throw new NotImplementedException();
-public static Model EfficientNetB0(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB1(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB2(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB3(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB4(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB5(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB6(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Model EfficientNetB7(bool include_top = true, string weights = "imagenet",
-Tensor input_tensor = null, TensorShape input_shape = null,
-string pooling = null, int classes = 1000) => throw new NotImplementedException();
-public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException();
-public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException();
-}
-}
| @@ -1,22 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class ImagenetUtils | |||||
| { | |||||
| public static Tensor preprocess_input(Tensor x, string data_format= null, string mode= "caffe") => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top= 5) => throw new NotImplementedException(); | |||||
| public static Tensor _preprocess_numpy_input(Tensor x, string data_format, string mode) => throw new NotImplementedException(); | |||||
| public static Tensor _preprocess_symbolic_input(Tensor x, string data_format, string mode) => throw new NotImplementedException(); | |||||
| public static TensorShape obtain_input_shape(TensorShape input_shape, int default_size, int min_size, | |||||
| string data_format, bool require_flatten, string weights= null) => throw new NotImplementedException(); | |||||
| public static ((int, int), (int, int)) correct_pad(Tensor inputs, (int, int) kernel_size) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,22 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class InceptionResnetV2 | |||||
| { | |||||
| public static Model InceptionResNetV2(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor conv2d_bn(Tensor x, int filters, (int, int) kernel_size, (int, int) strides, string padding= "same", | |||||
| string activation= "relu", bool use_bias= false, string name= null) => throw new NotImplementedException(); | |||||
| public static Tensor inception_resnet_block(Tensor x, float scale, string block_type, int block_idx, string activation= "relu") => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,19 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class InceptionV3 | |||||
| { | |||||
| public static Model Inceptionv3(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor conv2d_bn(Tensor x, int filters, int num_row, int num_col, string padding = "same", (int, int)? strides = null, string name = null) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,18 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Mobilenet | |||||
| { | |||||
| public static Model MobileNet(TensorShape input_shape= null, float alpha= 1.0f, int depth_multiplier= 1, float dropout= 1e-3f, | |||||
| bool include_top= true, string weights= "imagenet", Tensor input_tensor= null, string pooling= null, int classes= 1000) => throw new NotImplementedException(); | |||||
| public static Tensor conv2d_bn(Tensor x, int filters, float alpha, (int, int)? kernel = null, (int, int)? strides = null) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,21 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class MobilenetV2 | |||||
| { | |||||
| public static Model MobileNetV2(TensorShape input_shape = null, float alpha = 1.0f, bool include_top = true, | |||||
| string weights = "imagenet", Tensor input_tensor = null, string pooling = null, | |||||
| int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor _inverted_res_block(Tensor inputs, int expansion, (int, int) stride, float alpha, int filters, string block_id) => throw new NotImplementedException(); | |||||
| public static Tensor _make_divisible(Tensor v, Tensor divisor, Tensor min_value= null) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,31 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Nasnet | |||||
| { | |||||
| public static Model NASNet(TensorShape input_shape = null, int penultimate_filters = 4032, int num_blocks = 6, int stem_block_filters = 96, | |||||
| bool skip_reduction = true, int filter_multiplier = 2, bool include_top = true, string weights = null, | |||||
| Tensor input_tensor = null, string pooling = null, int classes = 1000, int? default_size = null) => throw new NotImplementedException(); | |||||
| public static Model NASNetMobile(TensorShape input_shape = null, bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Model NASNetLarge(TensorShape input_shape = null, bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor _separable_conv_block(Tensor ip, int filters, (int, int)? kernel_size= null, (int, int)? strides= null, string block_id= null) => throw new NotImplementedException(); | |||||
| public static Tensor _adjust_block(Tensor p, Tensor ip, int filters, string block_id= null) => throw new NotImplementedException(); | |||||
| public static Tensor _normal_a_cell(Tensor p, Tensor ip, int filters, string block_id = null) => throw new NotImplementedException(); | |||||
| public static Tensor _reduction_a_cell(Tensor p, Tensor ip, int filters, string block_id = null) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,41 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Resnet | |||||
| { | |||||
| public static Model ResNet(Func<Tensor, Tensor> stack_fn, bool preact, bool use_bias, string model_name= "resnet", bool include_top= true, | |||||
| string weights= "imagenet", Tensor input_tensor= null, TensorShape input_shape= null, string pooling= null, | |||||
| int classes= 1000) => throw new NotImplementedException(); | |||||
| public static Tensor block1(Tensor x, int filters, int kernel_size= 3, int stride= 1, bool conv_shortcut= true, string name= null) => throw new NotImplementedException(); | |||||
| public static Tensor stack1(Tensor x, int filters, int blocks, int stride1 = 2, string name = null) => throw new NotImplementedException(); | |||||
| public static Tensor block2(Tensor x, int filters, int kernel_size = 3, int stride = 1, bool conv_shortcut = true, string name = null) => throw new NotImplementedException(); | |||||
| public static Tensor stack2(Tensor x, int filters, int blocks, int stride1 = 2, string name = null) => throw new NotImplementedException(); | |||||
| public static Tensor block3(Tensor x, int filters, int kernel_size = 3, int stride = 1, int groups = 32, bool conv_shortcut = true, string name = null) => throw new NotImplementedException(); | |||||
| public static Tensor stack3(Tensor x, int filters, int blocks, int stride1 = 2, int groups = 32, string name = null) => throw new NotImplementedException(); | |||||
| public static Model ResNet50(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Model ResNet101(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Model ResNet152(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,25 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class ResnetV2 | |||||
| { | |||||
| public static Model ResNet50V2(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Model ResNet101V2(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Model ResNet152V2(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,17 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Vgg16 | |||||
| { | |||||
| public static Model VGG16(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,17 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Vgg19 | |||||
| { | |||||
| public static Model VGG19(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,17 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Applications | |||||
| { | |||||
| public class Xception | |||||
| { | |||||
| public static Model XCeption(bool include_top = true, string weights = "imagenet", | |||||
| Tensor input_tensor = null, TensorShape input_shape = null, | |||||
| string pooling = null, int classes = 1000) => throw new NotImplementedException(); | |||||
| public static Tensor preprocess_input(Tensor x, string data_format = null) => throw new NotImplementedException(); | |||||
| public static Tensor decode_predictions(Tensor preds, int top = 5) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,29 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras | |||||
| { | |||||
| public class Args | |||||
| { | |||||
| private List<object> args = new List<object>(); | |||||
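| // Indexer and Get<T> return the stored argument when the index is in range; otherwise null / default(T). | |||||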
| public object this[int index] | |||||
| { | |||||
| get | |||||
| { | |||||
| return index < args.Count ? args[index] : null; | |||||
| } | |||||
| } | |||||
| public T Get<T>(int index) | |||||
| { | |||||
| return index < args.Count ? (T)args[index] : default(T); | |||||
| } | |||||
| public void Add<T>(T arg) | |||||
| { | |||||
| args.Add(arg); | |||||
| } | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras | |||||
| { | |||||
| class Backend | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras | |||||
| { | |||||
| class BackendConfig | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class BaseLogger | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class CSVLogger | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class Callback | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class CallbackList | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class EarlyStopping | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class History | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class LambdaCallback | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class LearningRateScheduler | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class ModelCheckpoint | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class ProgbarLogger | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class ReduceLROnPlateau | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class RemoteMonitor | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class TensorBoard | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class TensorBoardV1 | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Callbacks | |||||
| { | |||||
| class TerminateOnNaN | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| public abstract class ConstraintBase | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| class MaxNorm | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| class MinMaxNorm | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| class NonNeg | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| class RadialConstraint | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Constraints | |||||
| { | |||||
| class UnitNorm | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,13 +0,0 @@ | |||||
| using Tensorflow; | |||||
| using static Tensorflow.Binding; | |||||
| namespace Keras | |||||
| { | |||||
| public static class Keras | |||||
| { | |||||
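| /// <summary>Creates a tensor of the given shape drawn from a truncated normal distribution (thin wrapper over tf.truncated_normal).</summary> | |||||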
| public static Tensor create_tensor(int[] shape, float mean = 0, float stddev = 1, TF_DataType dtype = TF_DataType.TF_FLOAT, int? seed = null, string name = null) | |||||
| { | |||||
| return tf.truncated_normal(shape: shape, mean: mean, stddev: stddev, dtype: dtype, seed: seed, name: name); | |||||
| } | |||||
| } | |||||
| } | |||||
| @@ -1,11 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class BostonHousing | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "boston_housing.npz", float test_split = 0.2f, int seed = 113) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,11 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class Cifar | |||||
| { | |||||
| public (Tensor, Tensor) load_batch(string fpath, string label_key = "labels") => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,11 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class Cifar10 | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data() => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,11 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class Cifar100 | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string label_mode = "fine") => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,11 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class FashionMNIST | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data() => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,15 +0,0 @@ | |||||
| using Newtonsoft.Json.Linq; | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class IMDB | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path= "imdb.npz", int? num_words= null, int skip_top= 0, int? maxlen= null, | |||||
| int seed= 113,int start_char= 1, int oov_char= 2, int index_from= 3) => throw new NotImplementedException(); | |||||
| public static JObject get_word_index(string path= "imdb_word_index.json") => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,11 +1,74 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| /***************************************************************************** | |||||
| Copyright 2020 Haiping Chen. All Rights Reserved. | |||||
| Licensed under the Apache License, Version 2.0 (the "License"); | |||||
| you may not use this file except in compliance with the License. | |||||
| You may obtain a copy of the License at | |||||
| http://www.apache.org/licenses/LICENSE-2.0 | |||||
| Unless required by applicable law or agreed to in writing, software | |||||
| distributed under the License is distributed on an "AS IS" BASIS, | |||||
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||||
| See the License for the specific language governing permissions and | |||||
| limitations under the License. | |||||
| ******************************************************************************/ | |||||
| using NumSharp; | |||||
| using System; | |||||
| using System.IO; | |||||
| using System.Net; | |||||
| namespace Tensorflow.Keras.Datasets | namespace Tensorflow.Keras.Datasets | ||||
| { | { | ||||
| public class MNIST | |||||
| public class Mnist | |||||
| { | { | ||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "mnist.npz") => throw new NotImplementedException(); | |||||
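| // Remote location of the pre-packaged MNIST archive and the local file name used for caching in the temp folder. | |||||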
| string origin_folder = "https://storage.googleapis.com/tensorflow/tf-keras-datasets/"; | |||||
| string file_name = "mnist.npz"; | |||||
| /// <summary> | |||||
| /// Loads the [MNIST dataset](http://yann.lecun.com/exdb/mnist/). | |||||
| /// </summary> | |||||
| /// <returns>A DatasetPass holding the (x, y) train and test splits.</returns> | |||||
| public DatasetPass load_data() | |||||
| { | |||||
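| // Fetch (or reuse) the cached mnist.npz, then split the archive into image (x) and label (y) arrays. | |||||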
| var file = Download(); | |||||
| var bytes = File.ReadAllBytes(file); | |||||
| var datax = LoadX(bytes); | |||||
| var datay = LoadY(bytes); | |||||
| return new DatasetPass | |||||
| { | |||||
| Train = (datax.Item1, datay.Item1), | |||||
| Test = (datax.Item2, datay.Item2) | |||||
| }; | |||||
| } | |||||
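| // Reads the train/test image arrays ("x_train.npy", "x_test.npy") out of the .npz archive bytes. | |||||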
| (NDArray, NDArray) LoadX(byte[] bytes) | |||||
| { | |||||
| var x = np.Load_Npz<byte[,,]>(bytes); | |||||
| return (x["x_train.npy"], x["x_test.npy"]); | |||||
| } | |||||
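| // Reads the train/test label arrays ("y_train.npy", "y_test.npy") out of the .npz archive bytes. | |||||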
| (NDArray, NDArray) LoadY(byte[] bytes) | |||||
| { | |||||
| var y = np.Load_Npz<byte[]>(bytes); | |||||
| return (y["y_train.npy"], y["y_test.npy"]); | |||||
| } | |||||
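| // Downloads mnist.npz into the system temp folder, skipping the download when a cached copy already exists; the async download is awaited synchronously. | |||||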
| string Download() | |||||
| { | |||||
| var fileSaveTo = Path.Combine(Path.GetTempPath(), file_name); | |||||
| if (File.Exists(fileSaveTo)) | |||||
| { | |||||
| Console.WriteLine($"The file {fileSaveTo} already exists; skipping download."); | |||||
| return fileSaveTo; | |||||
| } | |||||
| using var wc = new WebClient(); | |||||
| wc.DownloadFileTaskAsync(origin_folder + file_name, fileSaveTo).Wait(); | |||||
| return fileSaveTo; | |||||
| } | |||||
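| // Illustrative usage sketch (assumes DatasetPass exposes the Train/Test value tuples built in load_data above): | |||||
| //   var data = new Mnist().load_data(); | |||||
| //   var (x_train, y_train) = data.Train; | |||||
| //   var (x_test, y_test) = data.Test; | |||||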
| } | } | ||||
| } | } | ||||
| @@ -1,12 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Datasets | |||||
| { | |||||
| public class Reuters | |||||
| { | |||||
| public static ((Tensor, Tensor), (Tensor, Tensor)) load_data(string path = "reuters.npz", int? num_words= null, int skip_top= 0, | |||||
| int? maxlen= null,float test_split= 0.2f, int seed= 113,int start_char= 1,int oov_char= 2,int index_from= 3) => throw new NotImplementedException(); | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class DistributedTrainingUtils | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasCorrectnessTestBase | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasDnnCorrectnessTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasEmbeddingModelCorrectnessTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasImageModelCorrectnessTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasOptimizerV2Test | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasPremadeModelsTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasRnnModelCorrectnessTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasStatefulLstmModelCorrectnessTest | |||||
| { | |||||
| } | |||||
| } | |||||
| @@ -1,10 +0,0 @@ | |||||
| using System; | |||||
| using System.Collections.Generic; | |||||
| using System.Text; | |||||
| namespace Tensorflow.Keras.Distribute | |||||
| { | |||||
| class KerasUtilsTest | |||||
| { | |||||
| } | |||||
| } | |||||