@@ -20,7 +20,7 @@
   </PropertyGroup>

   <ItemGroup>
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.10.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
   </ItemGroup>

   <ItemGroup>
@@ -0,0 +1,15 @@
+namespace Tensorflow.Keras.Engine;
+
+public interface ICallback
+{
+    Dictionary<string, List<float>> history { get; set; }
+    void on_train_begin();
+    void on_epoch_begin(int epoch);
+    void on_train_batch_begin(long step);
+    void on_train_batch_end(long end_step, Dictionary<string, float> logs);
+    void on_epoch_end(int epoch, Dictionary<string, float> epoch_logs);
+    void on_predict_begin();
+    void on_predict_batch_begin(long step);
+    void on_predict_batch_end(long end_step, Dictionary<string, Tensors> logs);
+    void on_predict_end();
+}
@@ -1,6 +1,65 @@
-namespace Tensorflow.Keras.Engine
-{
-    public interface IModel
-    {
-    }
-}
+using Tensorflow.Functions;
+using Tensorflow.Keras.Losses;
+using Tensorflow.Keras.Saving;
+using Tensorflow.NumPy;
+
+namespace Tensorflow.Keras.Engine;
+
+public interface IModel : ILayer
+{
+    void compile(IOptimizer optimizer = null,
+        ILossFunc loss = null,
+        string[] metrics = null);
+
+    void compile(string optimizer, string loss, string[] metrics);
+
+    ICallback fit(NDArray x, NDArray y,
+        int batch_size = -1,
+        int epochs = 1,
+        int verbose = 1,
+        float validation_split = 0f,
+        bool shuffle = true,
+        int initial_epoch = 0,
+        int max_queue_size = 10,
+        int workers = 1,
+        bool use_multiprocessing = false);
+
+    void save(string filepath,
+        bool overwrite = true,
+        bool include_optimizer = true,
+        string save_format = "tf",
+        SaveOptions? options = null,
+        ConcreteFunction? signatures = null,
+        bool save_traces = true);
+
+    void save_weights(string filepath,
+        bool overwrite = true,
+        string save_format = null,
+        object options = null);
+
+    void load_weights(string filepath,
+        bool by_name = false,
+        bool skip_mismatch = false,
+        object options = null);
+
+    void evaluate(NDArray x, NDArray y,
+        int batch_size = -1,
+        int verbose = 1,
+        int steps = -1,
+        int max_queue_size = 10,
+        int workers = 1,
+        bool use_multiprocessing = false,
+        bool return_dict = false);
+
+    Tensors predict(Tensor x,
+        int batch_size = -1,
+        int verbose = 0,
+        int steps = -1,
+        int max_queue_size = 10,
+        int workers = 1,
+        bool use_multiprocessing = false);
+
+    void summary(int line_length = -1, float[] positions = null);
+
+    IKerasConfig get_config();
+}
@@ -0,0 +1,13 @@
+namespace Tensorflow.Keras.Engine;
+
+public interface IOptimizer
+{
+    Tensor[] aggregate_gradients(IEnumerable<(Tensor, IVariableV1)> grads_and_vars);
+    Tensor[] clip_gradients(Tensor[] grads);
+    void apply_gradients((Tensor, ResourceVariable) grads_and_vars,
+        string name = null,
+        bool experimental_aggregate_gradients = true);
+    void apply_gradients(IEnumerable<(Tensor, ResourceVariable)> grads_and_vars,
+        string name = null,
+        bool experimental_aggregate_gradients = true);
+}
@@ -1,6 +1,7 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
+using Tensorflow.Keras.Engine;
 using Tensorflow.Keras.Layers;
 using Tensorflow.Keras.Losses;
 using Tensorflow.Keras.Metrics;
@@ -13,5 +14,13 @@ namespace Tensorflow.Keras
         public ILossesApi losses { get; }
         public IMetricsApi metrics { get; }
         public IInitializersApi initializers { get; }
+
+        /// <summary>
+        /// `Model` groups layers into an object with training and inference features.
+        /// </summary>
+        /// <param name="input"></param>
+        /// <param name="output"></param>
+        /// <returns></returns>
+        public IModel Model(Tensors inputs, Tensors outputs, string name = null);
     }
 }
@@ -1,6 +1,4 @@
-using System.Collections.Generic;
-using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine;
 using Tensorflow.Keras.Saving;
 using Tensorflow.Training;
@@ -15,7 +13,7 @@ namespace Tensorflow.Keras
         List<ILayer> Layers { get; }
         List<INode> InboundNodes { get; }
         List<INode> OutboundNodes { get; }
-        Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false);
+        Tensors Apply(Tensors inputs, Tensor state = null, bool training = false);
         List<IVariableV1> TrainableVariables { get; }
         List<IVariableV1> TrainableWeights { get; }
         List<IVariableV1> NonTrainableWeights { get; }
| @@ -5,7 +5,7 @@ | |||||
| <AssemblyName>Tensorflow.Binding</AssemblyName> | <AssemblyName>Tensorflow.Binding</AssemblyName> | ||||
| <RootNamespace>Tensorflow</RootNamespace> | <RootNamespace>Tensorflow</RootNamespace> | ||||
| <TargetTensorFlow>2.10.0</TargetTensorFlow> | <TargetTensorFlow>2.10.0</TargetTensorFlow> | ||||
| <Version>0.100.4</Version> | |||||
| <Version>1.0.0</Version> | |||||
| <LangVersion>10.0</LangVersion> | <LangVersion>10.0</LangVersion> | ||||
| <Nullable>enable</Nullable> | <Nullable>enable</Nullable> | ||||
| <Authors>Haiping Chen, Meinrad Recheis, Eli Belash</Authors> | <Authors>Haiping Chen, Meinrad Recheis, Eli Belash</Authors> | ||||
@@ -20,7 +20,7 @@
     <Description>Google's TensorFlow full binding in .NET Standard.
 Building, training and infering deep learning models.
 https://tensorflownet.readthedocs.io</Description>
-    <AssemblyVersion>0.100.4.0</AssemblyVersion>
+    <AssemblyVersion>1.0.0.0</AssemblyVersion>
     <PackageReleaseNotes>
 tf.net 0.100.x and above are based on tensorflow native 2.10.0
@@ -38,7 +38,7 @@ https://tensorflownet.readthedocs.io</Description>
 tf.net 0.7x.x aligns with TensorFlow v2.7.x native library.
 tf.net 0.10x.x aligns with TensorFlow v2.10.x native library.
     </PackageReleaseNotes>
-    <FileVersion>0.100.4.0</FileVersion>
+    <FileVersion>1.0.0.0</FileVersion>
     <PackageLicenseFile>LICENSE</PackageLicenseFile>
     <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance>
     <SignAssembly>true</SignAssembly>
@@ -1,63 +1,63 @@
 using System;
 using System.Collections.Generic;
 using System.Text;
+using Tensorflow.Keras.Engine;

-namespace Tensorflow.Keras.Callbacks
-{
-    public class CallbackList
-    {
+namespace Tensorflow.Keras.Callbacks;
+
+public class CallbackList
+{
     List<ICallback> callbacks = new List<ICallback>();
     public History History => callbacks[0] as History;

     public CallbackList(CallbackParams parameters)
     {
         callbacks.Add(new History(parameters));
         callbacks.Add(new ProgbarLogger(parameters));
     }

     public void on_train_begin()
     {
         callbacks.ForEach(x => x.on_train_begin());
     }

     public void on_epoch_begin(int epoch)
     {
         callbacks.ForEach(x => x.on_epoch_begin(epoch));
     }

     public void on_train_batch_begin(long step)
     {
         callbacks.ForEach(x => x.on_train_batch_begin(step));
     }

     public void on_train_batch_end(long end_step, Dictionary<string, float> logs)
     {
         callbacks.ForEach(x => x.on_train_batch_end(end_step, logs));
     }

     public void on_epoch_end(int epoch, Dictionary<string, float> epoch_logs)
     {
         callbacks.ForEach(x => x.on_epoch_end(epoch, epoch_logs));
     }

     public void on_predict_begin()
     {
         callbacks.ForEach(x => x.on_predict_begin());
     }

     public void on_predict_batch_begin(long step)
     {
         callbacks.ForEach(x => x.on_predict_batch_begin(step));
     }

     public void on_predict_batch_end(long end_step, Dictionary<string, Tensors> logs)
     {
         callbacks.ForEach(x => x.on_predict_batch_end(end_step, logs));
     }

     public void on_predict_end()
     {
         callbacks.ForEach(x => x.on_predict_end());
     }
 }
@@ -1,73 +1,70 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
+using Tensorflow.Keras.Engine;

-namespace Tensorflow.Keras.Callbacks
-{
-    public class History : ICallback
-    {
+namespace Tensorflow.Keras.Callbacks;
+
+public class History : ICallback
+{
     List<int> epochs;
     CallbackParams _parameters;
     public Dictionary<string, List<float>> history { get; set; }

     public History(CallbackParams parameters)
     {
         _parameters = parameters;
     }

     public void on_train_begin()
     {
         epochs = new List<int>();
         history = new Dictionary<string, List<float>>();
     }

     public void on_epoch_begin(int epoch)
     {
     }

     public void on_train_batch_begin(long step)
     {
     }

     public void on_train_batch_end(long end_step, Dictionary<string, float> logs)
     {
     }

     public void on_epoch_end(int epoch, Dictionary<string, float> epoch_logs)
     {
         epochs.Add(epoch);

         foreach (var log in epoch_logs)
         {
             if (!history.ContainsKey(log.Key))
             {
                 history[log.Key] = new List<float>();
             }
             history[log.Key].Add((float)log.Value);
         }
     }

     public void on_predict_begin()
     {
         epochs = new List<int>();
         history = new Dictionary<string, List<float>>();
     }

     public void on_predict_batch_begin(long step)
     {
     }

     public void on_predict_batch_end(long end_step, Dictionary<string, Tensors> logs)
     {
     }

     public void on_predict_end()
     {
     }
 }
@@ -1,19 +0,0 @@
-using System;
-using System.Collections.Generic;
-using System.Text;
-
-namespace Tensorflow.Keras.Callbacks
-{
-    public interface ICallback
-    {
-        void on_train_begin();
-        void on_epoch_begin(int epoch);
-        void on_train_batch_begin(long step);
-        void on_train_batch_end(long end_step, Dictionary<string, float> logs);
-        void on_epoch_end(int epoch, Dictionary<string, float> epoch_logs);
-        void on_predict_begin();
-        void on_predict_batch_begin(long step);
-        void on_predict_batch_end(long end_step, Dictionary<string, Tensors> logs);
-        void on_predict_end();
-    }
-}
@@ -1,8 +1,5 @@
-using System;
-using System.Collections.Generic;
-using System.Diagnostics;
-using System.Linq;
-using System.Text;
+using System.Diagnostics;
+using Tensorflow.Keras.Engine;

 namespace Tensorflow.Keras.Callbacks
 {
@@ -13,6 +10,8 @@ namespace Tensorflow.Keras.Callbacks
         CallbackParams _parameters;
         Stopwatch _sw;

+        public Dictionary<string, List<float>> history { get; set; }
+
         public ProgbarLogger(CallbackParams parameters)
         {
             _parameters = parameters;
@@ -348,7 +348,7 @@ namespace Tensorflow.Keras.Engine
                 var layer_inputs = node.MapArguments(tensor_dict);

                 tf.Logger.Debug($"Depth {depth}: {node.Layer}: {node.Layer.Name}");
-                var outputs = node.Layer.Apply(layer_inputs, is_training: training ?? false);
+                var outputs = node.Layer.Apply(layer_inputs, training: training ?? false);
                 foreach (var output in outputs.Where(x => x != null))
                     tf.Logger.Information($"Depth {depth}: {node.Layer}: {node.Layer.Name} {output.shape}");

                 // Update tensor_dict for next or later input
@@ -1,5 +1,4 @@
-using System;
 using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Losses;
 using Tensorflow.Keras.Metrics;
 using Tensorflow.Keras.Optimizers;
@@ -11,7 +10,7 @@ namespace Tensorflow.Keras.Engine
         LossesContainer compiled_loss;
         MetricsContainer compiled_metrics;

-        public void compile(OptimizerV2 optimizer = null,
+        public void compile(IOptimizer optimizer = null,
             ILossFunc loss = null,
             string[] metrics = null)
         {
@@ -32,7 +31,7 @@ namespace Tensorflow.Keras.Engine
             _is_compiled = true;
         }

-        public void compile(OptimizerV2 optimizer = null,
+        public void compile(IOptimizer optimizer = null,
             ILossFunc loss = null,
             IMetricFunc[] metrics = null)
         {
@@ -22,7 +22,7 @@ namespace Tensorflow.Keras.Engine
        /// <param name="verbose"></param>
        /// <param name="validation_split"></param>
        /// <param name="shuffle"></param>
-        public History fit(NDArray x, NDArray y,
+        public ICallback fit(NDArray x, NDArray y,
            int batch_size = -1,
            int epochs = 1,
            int verbose = 1,
@@ -51,11 +51,11 @@ namespace Tensorflow.Keras.Engine
            return dict;
        }

-        void _minimize(GradientTape tape, OptimizerV2 optimizer, Tensor loss, List<IVariableV1> trainable_variables)
+        void _minimize(GradientTape tape, IOptimizer optimizer, Tensor loss, List<IVariableV1> trainable_variables)
        {
            var gradients = tape.gradient(loss, trainable_variables);
-            gradients = optimizer._aggregate_gradients(zip(gradients, trainable_variables));
-            gradients = optimizer._clip_gradients(gradients);
+            gradients = optimizer.aggregate_gradients(zip(gradients, trainable_variables));
+            gradients = optimizer.clip_gradients(gradients);
            optimizer.apply_gradients(zip(gradients, trainable_variables.Select(x => x as ResourceVariable)),
                experimental_aggregate_gradients: false);
@@ -1,13 +1,7 @@
-using System.Collections.Generic;
-using System.Linq;
 using Tensorflow.Keras.ArgsDefinition;
-using Tensorflow.Keras.Engine.DataAdapters;
 using Tensorflow.Keras.Losses;
-using Tensorflow.Keras.Optimizers;
 using Tensorflow.Keras.Saving.SavedModel;
 using Tensorflow.Train;
-using static Tensorflow.Binding;
-using static Tensorflow.KerasApi;

 namespace Tensorflow.Keras.Engine
 {
@@ -25,7 +19,7 @@ namespace Tensorflow.Keras.Engine
 #pragma warning restore CS0414 // The field 'Model._is_compiled' is assigned but its value is never used
 #pragma warning restore CS0108 // Member hides inherited member; missing new keyword
         ILossFunc loss;
-        OptimizerV2 optimizer;
+        IOptimizer optimizer;
         IVariableV1 _steps_per_execution;
         protected bool _is_graph_network;
         protected Tensors inputs;
@@ -39,7 +33,7 @@ namespace Tensorflow.Keras.Engine
         public bool IsGraphNetwork => _is_graph_network;

-        public OptimizerV2 Optimizer
+        public IOptimizer Optimizer
         {
             get => optimizer;
             set => optimizer = value;
@@ -46,7 +46,7 @@ namespace Tensorflow.Keras
        /// <param name="input"></param>
        /// <param name="output"></param>
        /// <returns></returns>
-        public Functional Model(Tensors inputs, Tensors outputs, string name = null)
+        public IModel Model(Tensors inputs, Tensors outputs, string name = null)
            => new Functional(inputs, outputs, name: name);

        /// <summary>
@@ -1,6 +0,0 @@
-namespace Tensorflow.Keras.Optimizers
-{
-    public interface IOptimizer
-    {
-    }
-}
@@ -1,10 +1,7 @@
-using System;
-using System.Collections.Generic;
-using System.Linq;
 using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
 using Tensorflow.Keras.Utils;
 using Tensorflow.Train;
-using static Tensorflow.Binding;

 namespace Tensorflow.Keras.Optimizers
 {
@@ -114,12 +111,12 @@ namespace Tensorflow.Keras.Optimizers
             });
         }

-        public Tensor[] _aggregate_gradients(IEnumerable<(Tensor, IVariableV1)> grads_and_vars)
+        public Tensor[] aggregate_gradients(IEnumerable<(Tensor, IVariableV1)> grads_and_vars)
         {
             return grads_and_vars.Select(x => x.Item1).ToArray();
         }

-        public Tensor[] _clip_gradients(Tensor[] grads)
+        public Tensor[] clip_gradients(Tensor[] grads)
         {
             return grads;
         }
@@ -33,7 +33,7 @@ public partial class KerasSavedModelUtils
            }
        }

-        OptimizerV2? orig_optimizer = null;
+        IOptimizer? orig_optimizer = null;
        if (!include_optimizer)
        {
            orig_optimizer = model.Optimizer;
@@ -7,7 +7,7 @@
     <Nullable>enable</Nullable>
     <RootNamespace>Tensorflow.Keras</RootNamespace>
     <Platforms>AnyCPU;x64</Platforms>
-    <Version>0.10.4</Version>
+    <Version>1.0.0</Version>
     <Authors>Haiping Chen</Authors>
     <Product>Keras for .NET</Product>
     <Copyright>Apache 2.0, Haiping Chen 2023</Copyright>
@@ -37,8 +37,8 @@ Keras is an API designed for human beings, not machines. Keras follows best prac
     <RepositoryType>Git</RepositoryType>
     <SignAssembly>true</SignAssembly>
     <AssemblyOriginatorKeyFile>Open.snk</AssemblyOriginatorKeyFile>
-    <AssemblyVersion>0.10.4.0</AssemblyVersion>
-    <FileVersion>0.10.4.0</FileVersion>
+    <AssemblyVersion>1.0.0.0</AssemblyVersion>
+    <FileVersion>1.0.0.0</FileVersion>
     <PackageLicenseFile>LICENSE</PackageLicenseFile>
     <Configurations>Debug;Release;GPU</Configurations>
   </PropertyGroup>
@@ -73,7 +73,7 @@ Keras is an API designed for human beings, not machines. Keras follows best prac
     <PackageReference Include="HDF5-CSharp" Version="1.16.3" />
     <PackageReference Include="MethodBoundaryAspect.Fody" Version="2.0.148" />
     <PackageReference Include="Newtonsoft.Json" Version="13.0.2" />
-    <PackageReference Include="SharpZipLib" Version="1.4.1" />
+    <PackageReference Include="SharpZipLib" Version="1.4.2" />
   </ItemGroup>

   <ItemGroup>
| @@ -1,10 +1,8 @@ | |||||
| using BenchmarkDotNet.Attributes; | using BenchmarkDotNet.Attributes; | ||||
| using Tensorflow; | |||||
| using Tensorflow.Eager; | |||||
| namespace TensorFlowBenchmark | namespace TensorFlowBenchmark | ||||
| { | { | ||||
| [SimpleJob(launchCount: 1, warmupCount: 1, targetCount: 10)] | |||||
| [SimpleJob(launchCount: 1, warmupCount: 1)] | |||||
| [MinColumn, MaxColumn, MeanColumn, MedianColumn] | [MinColumn, MaxColumn, MeanColumn, MedianColumn] | ||||
| public class TensorBenchmark | public class TensorBenchmark | ||||
| { | { | ||||
@@ -36,8 +36,8 @@
   </ItemGroup>

   <ItemGroup>
-    <PackageReference Include="BenchmarkDotNet" Version="0.13.1" />
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.7.0" />
+    <PackageReference Include="BenchmarkDotNet" Version="0.13.5" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
   </ItemGroup>

   <ItemGroup>
@@ -16,7 +16,7 @@ namespace TensorFlowBenchmark.Unmanaged
         }
     }

-    [SimpleJob(launchCount: 1, warmupCount: 2, targetCount: 10)]
+    [SimpleJob(launchCount: 1, warmupCount: 2)]
     [MinColumn, MaxColumn, MeanColumn, MedianColumn]
     public unsafe class StructCastBenchmark
     {
@@ -27,11 +27,11 @@
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.0.0" />
     <PackageReference Include="MSTest.TestAdapter" Version="2.2.8" />
     <PackageReference Include="MSTest.TestFramework" Version="2.2.8" />
-    <PackageReference Include="coverlet.collector" Version="3.1.0">
+    <PackageReference Include="coverlet.collector" Version="3.2.0">
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.7.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
   </ItemGroup>

   <ItemGroup>
@@ -11,17 +11,17 @@ namespace TensorFlowNET.Keras.UnitTest;
 [TestClass]
 public class GradientTest
 {
-    public Model get_actor(int num_states)
+    public IModel get_actor(int num_states)
     {
-        var inputs = keras.layers.Input(shape: num_states);
-        var outputs = keras.layers.Dense(1, activation: keras.activations.Tanh).Apply(inputs);
+        var inputs = tf.keras.layers.Input(shape: num_states);
+        var outputs = tf.keras.layers.Dense(1, activation: keras.activations.Tanh).Apply(inputs);

-        Model model = keras.Model(inputs, outputs);
+        var model = tf.keras.Model(inputs, outputs);
         return model;
     }

-    public Model get_critic(int num_states, int num_actions)
+    public IModel get_critic(int num_states, int num_actions)
     {
         // State as input
         var state_input = keras.layers.Input(shape: num_states);
@@ -33,7 +33,7 @@ public class GradientTest
         var outputs = keras.layers.Dense(1).Apply(concat);

-        Model model = keras.Model(new Tensors(state_input, action_input), outputs);
+        var model = tf.keras.Model(new Tensors(state_input, action_input), outputs);
         model.summary();

         return model;
@@ -22,7 +22,7 @@ namespace TensorFlowNET.Keras.UnitTest
             Assert.AreEqual(model.Layers.Count, new_model.Layers.Count);
         }

-        Functional GetFunctionalModel()
+        IModel GetFunctionalModel()
         {
             // Create a simple model.
             var inputs = keras.Input(shape: 32);
@@ -51,7 +51,7 @@ namespace TensorFlowNET.Keras.UnitTest
             //Sanity check without multithreading
             for (int i = 0; i < 2; i++)
             {
-                Functional clone = BuildModel();
+                var clone = BuildModel();
                 clone.load_weights(savefile);

                 //Predict something
@@ -71,7 +71,7 @@ namespace TensorFlowNET.Keras.UnitTest
             });
         }

-        Functional BuildModel()
+        IModel BuildModel()
         {
             tf.Context.reset_context();
             var inputs = keras.Input(shape: 2);
@@ -81,7 +81,7 @@ namespace TensorFlowNET.Keras.UnitTest
             var outputs = DenseLayer.Apply(inputs);

             // build keras model
-            Functional model = keras.Model(inputs, outputs, name: Guid.NewGuid().ToString());
+            var model = tf.keras.Model(inputs, outputs, name: Guid.NewGuid().ToString());

             // show model summary
             model.summary();
@@ -3,9 +3,7 @@ using System.Collections.Generic;
 using System.Diagnostics;
 using Tensorflow;
 using Tensorflow.Keras;
-using Tensorflow.Keras.ArgsDefinition;
 using Tensorflow.Keras.Engine;
-using Tensorflow.Keras.Layers;
 using Tensorflow.Keras.Losses;
 using Tensorflow.Keras.Optimizers;
 using Tensorflow.NumPy;
@@ -20,14 +18,16 @@ public class SequentialModelSave
     [TestMethod]
     public void SimpleModelFromAutoCompile()
     {
-        var inputs = new KerasInterface().Input((28, 28, 1));
-        var x = new Flatten(new FlattenArgs()).Apply(inputs);
-        x = new Dense(new DenseArgs() { Units = 100, Activation = tf.nn.relu }).Apply(x);
-        x = new LayersApi().Dense(units: 10).Apply(x);
-        var outputs = new LayersApi().Softmax(axis: 1).Apply(x);
-        var model = new KerasInterface().Model(inputs, outputs);
-        model.compile(new Adam(0.001f), new LossesApi().SparseCategoricalCrossentropy(), new string[] { "accuracy" });
+        var inputs = tf.keras.layers.Input((28, 28, 1));
+        var x = tf.keras.layers.Flatten().Apply(inputs);
+        x = tf.keras.layers.Dense(100, activation: tf.nn.relu).Apply(x);
+        x = tf.keras.layers.Dense(units: 10).Apply(x);
+        var outputs = tf.keras.layers.Softmax(axis: 1).Apply(x);
+        var model = tf.keras.Model(inputs, outputs);
+        model.compile(new Adam(0.001f),
+            tf.keras.losses.SparseCategoricalCrossentropy(),
+            new string[] { "accuracy" });

         var data_loader = new MnistModelLoader();
         var num_epochs = 1;
@@ -16,11 +16,11 @@
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.0.0" />
     <PackageReference Include="MSTest.TestAdapter" Version="2.2.8" />
     <PackageReference Include="MSTest.TestFramework" Version="2.2.8" />
-    <PackageReference Include="coverlet.collector" Version="3.1.0">
+    <PackageReference Include="coverlet.collector" Version="3.2.0">
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.7.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
   </ItemGroup>

   <ItemGroup>
@@ -1,4 +1,4 @@
-<Project Sdk="Microsoft.NET.Sdk">
+<Project Sdk="Microsoft.NET.Sdk">

   <PropertyGroup>
     <TargetFramework>net6.0</TargetFramework>
@@ -47,11 +47,11 @@
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.0.0" />
     <PackageReference Include="MSTest.TestAdapter" Version="2.2.8" />
     <PackageReference Include="MSTest.TestFramework" Version="2.2.8" />
-    <PackageReference Include="coverlet.collector" Version="3.1.0">
+    <PackageReference Include="coverlet.collector" Version="3.2.0">
       <PrivateAssets>all</PrivateAssets>
       <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
     </PackageReference>
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.7.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
     <PackageReference Include="SciSharp.TensorFlow.Redist-Lite" Version="2.6.0" />
   </ItemGroup>
@@ -51,7 +51,7 @@
     <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.0.0" />
     <PackageReference Include="MSTest.TestAdapter" Version="2.2.8" />
     <PackageReference Include="MSTest.TestFramework" Version="2.2.8" />
-    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.7.0" />
+    <PackageReference Include="SciSharp.TensorFlow.Redist" Version="2.11.0" />
   </ItemGroup>

   <ItemGroup>