@@ -13,6 +13,8 @@ namespace Tensorflow.Keras
         List<INode> OutboundNodes { get; }
         Tensors Apply(Tensors inputs, Tensor state = null, bool is_training = false);
         List<IVariableV1> trainable_variables { get; }
+        List<IVariableV1> trainable_weights { get; }
+        List<IVariableV1> non_trainable_weights { get; }
         TensorShape output_shape { get; }
         int count_params();
         LayerArgs get_config();
@@ -67,6 +67,8 @@ namespace Tensorflow
         public bool Trainable => throw new NotImplementedException();
         public List<IVariableV1> trainable_variables => throw new NotImplementedException();
+        public List<IVariableV1> trainable_weights => throw new NotImplementedException();
+        public List<IVariableV1> non_trainable_weights => throw new NotImplementedException();
         public TensorShape output_shape => throw new NotImplementedException();
@@ -239,6 +239,21 @@ namespace Tensorflow.Keras.Engine
                 return layer_utils.count_params(this, weights);
             return 0;
         }
 
+        List<IVariableV1> ILayer.trainable_weights
+        {
+            get
+            {
+                return trainable_weights;
+            }
+        }
+
+        List<IVariableV1> ILayer.non_trainable_weights
+        {
+            get
+            {
+                return non_trainable_weights;
+            }
+        }
+
         public List<IVariableV1> weights
         {
@@ -0,0 +1,43 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using HDF.PInvoke;
+using HDF5CSharp;
+using Tensorflow.Keras.Saving;
+
+namespace Tensorflow.Keras.Engine
+{
+    public partial class Model
+    {
+        public void load_weights(string filepath = "", bool by_name = false, bool skip_mismatch = false, object options = null)
+        {
+            // Open the checkpoint read-only; the file id doubles as the root group id.
+            long fileId = Hdf5.OpenFile(filepath, true);
+            long f = fileId;
+            try
+            {
+                // Files written by model.save() nest the weights under "model_weights",
+                // while save_weights() keeps "layer_names" at the root.
+                bool msuccess = Hdf5.GroupExists(fileId, "model_weights");
+                bool lsuccess = Hdf5.GroupExists(fileId, "layer_names");
+                if (!lsuccess && msuccess)
+                    f = H5G.open(fileId, "model_weights");
+
+                if (by_name)
+                {
+                    // Planned: hdf5_format.load_weights_from_hdf5_group_by_name(f, this);
+                    throw new NotImplementedException("by_name loading is not implemented yet.");
+                }
+
+                hdf5_format.load_weights_from_hdf5_group(f, this);
+            }
+            finally
+            {
+                // Only close f separately when it is a group this method opened.
+                if (f != fileId)
+                    H5G.close(f);
+                Hdf5.CloseFile(fileId);
+            }
+        }
+    }
+}
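For reference, a minimal call-site sketch for the new API (the example class, the model instance, and the "weights.h5" path are illustrative assumptions; the model must already be built so that layer names and shapes line up with the file):

    using Tensorflow.Keras.Engine;

    public static class LoadWeightsExample
    {
        public static void Run(Model model)
        {
            // Loads weights saved either by model.save() (nested under
            // "model_weights") or by save_weights() (layer groups at the root).
            model.load_weights("weights.h5");

            // by_name matching is stubbed out in this change and will throw.
            // model.load_weights("weights.h5", by_name: true);
        }
    }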
@@ -0,0 +1,111 @@
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using HDF.PInvoke;
+using NumSharp;
+using Tensorflow.Keras.Engine;
+using HDF5CSharp;
+using static Tensorflow.Binding;
+using static Tensorflow.KerasApi;
+
+namespace Tensorflow.Keras.Saving
+{
+    public class hdf5_format
+    {
+        public static void load_model_from_hdf5(string filepath = "", Dictionary<string, object> custom_objects = null, bool compile = false)
+        {
+            long root = Hdf5.OpenFile(filepath, true);
+            load_model_from_hdf5(root, custom_objects, compile);
+        }
+
+        public static void load_model_from_hdf5(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false)
+        {
+            // Not implemented yet: read the "model_config" attribute from the
+            // root group and rebuild the model from its JSON description, e.g.:
+            //long groupId = H5G.open(fileId, "/");
+            //(bool success, string[] attrId) = Hdf5.ReadStringAttributes(groupId, "model_config", "");
+            //H5G.close(groupId);
+            //if (success)
+            //    Console.WriteLine(attrId[0]);
+        }
+
+        // The methods below are placeholders; their signatures are provisional.
+        public static void save_model_to_hdf5(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void preprocess_weights_for_loading(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void _convert_rnn_weights(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void save_optimizer_weights_to_hdf5_group(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void load_optimizer_weights_from_hdf5_group(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void save_weights_to_hdf5_group(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void toarrayf4(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void load_weights_from_hdf5_group_by_name(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+        public static void save_attributes_to_hdf5_group(long fileId = -1, Dictionary<string, object> custom_objects = null, bool compile = false) { }
+
+        public static void load_weights_from_hdf5_group(long f = -1, Model model = null)
+        {
+            string original_keras_version = "1";
+            string original_backend = null;
+            if (Hdf5.AttributeExists(f, "keras_version"))
+            {
+                (bool success, string[] attr) = Hdf5.ReadStringAttributes(f, "keras_version", "");
+                if (success)
+                    original_keras_version = attr[0];
+            }
+            if (Hdf5.AttributeExists(f, "backend"))
+            {
+                (bool success, string[] attr) = Hdf5.ReadStringAttributes(f, "backend", "");
+                if (success)
+                    original_backend = attr[0];
+            }
+
+            // Only layers that actually own weights take part in the matching.
+            var filtered_layers = new List<ILayer>();
+            foreach (var layer in model.Layers)
+            {
+                if (_legacy_weights(layer).Count > 0)
+                    filtered_layers.append(layer);
+            }
+
+            string[] layer_names = load_attributes_from_hdf5_group(f, "layer_names");
+            var weight_values = new List<NDArray>();
+            foreach (var layer in filtered_layers)
+            {
+                long g = H5G.open(f, layer.Name);
+                string[] weight_names = null;
+                if (g != -1)
+                    weight_names = load_attributes_from_hdf5_group(g, "weight_names");
+                if (weight_names != null)
+                {
+                    foreach (var weight_name in weight_names)
+                    {
+                        (bool success, Array result) = Hdf5.ReadDataset<float>(g, weight_name);
+                        if (success)
+                            weight_values.Add(np.array(result));
+                    }
+                }
+                H5G.close(g);
+            }
+            // TODO: validate weight_values against layer_names / original_keras_version
+            // and assign them to the layer variables; for now they are only read.
+        }
+
+        public static string[] load_attributes_from_hdf5_group(long f = -1, string name = "")
+        {
+            if (Hdf5.AttributeExists(f, name))
+            {
+                (bool success, string[] attr) = Hdf5.ReadStringAttributes(f, name, "");
+                if (success)
+                    return attr;
+            }
+            return null;
+        }
+
+        public static List<Tensor> _legacy_weights(ILayer layer)
+        {
+            // Keras's legacy ordering: all trainable weights first, then the non-trainable ones.
+            var weights = layer.trainable_weights.Select(v => v.AsTensor()).ToList();
+            weights.AddRange(layer.non_trainable_weights.Select(v => v.AsTensor()));
+            return weights;
+        }
+    }
+}
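A short sketch of exercising the attribute helper above directly (the example class, the path, and the attribute layout are assumptions matching what Keras writes into a save_weights() file):

    using HDF5CSharp;
    using Tensorflow.Keras.Saving;

    public static class InspectCheckpointExample
    {
        public static void Run()
        {
            // Open an existing checkpoint read-only; the path is illustrative.
            long f = Hdf5.OpenFile("weights.h5", true);

            // "layer_names" holds the ordered layer names as a string attribute
            // on the root group; the helper returns null when it is absent.
            string[] layer_names = hdf5_format.load_attributes_from_hdf5_group(f, "layer_names");
            if (layer_names != null)
                foreach (var name in layer_names)
                    System.Console.WriteLine(name);

            Hdf5.CloseFile(f);
        }
    }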
@@ -24,7 +24,7 @@
     Keras is an API designed for human beings, not machines. Keras follows best practices for reducing cognitive load: it offers consistent & simple APIs, it minimizes the number of user actions required for common use cases, and it provides clear & actionable error messages.</Description>
     <Company>SciSharp STACK</Company>
     <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
-    <PackageTags>tensorflow, keras, deep learning, machine learning, scisharp</PackageTags>
+    <PackageTags>tensorflow, keras, deep learning, machine learning</PackageTags>
     <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance>
     <RepositoryType>Git</RepositoryType>
     <SignAssembly>true</SignAssembly>
@@ -44,16 +44,15 @@ Keras is an API designed for human beings, not machines. Keras follows best prac
   </PropertyGroup>
 
   <ItemGroup>
+    <PackageReference Include="HDF.PInvoke.1.10" Version="1.10.500" />
     <PackageReference Include="MethodBoundaryAspect.Fody" Version="2.0.138" />
     <PackageReference Include="Newtonsoft.Json" Version="12.0.3" />
+    <PackageReference Include="NumSharp.Lite" Version="0.1.10" />
+    <PackageReference Include="SciSharp.Keras.HDF5" Version="1.1.10.500" />
     <PackageReference Include="SharpZipLib" Version="1.3.1" />
     <PackageReference Include="ShellProgressBar" Version="5.0.0" />
   </ItemGroup>
 
-  <ItemGroup>
-    <Folder Include="Saving\" />
-  </ItemGroup>
-
   <ItemGroup>
     <ProjectReference Include="..\TensorFlowNET.Core\Tensorflow.Binding.csproj" />
   </ItemGroup>
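The package references added above supply the two layers of the HDF5 stack this change builds on: SciSharp.Keras.HDF5 provides the high-level HDF5CSharp Hdf5 wrapper, while HDF.PInvoke.1.10 exposes the raw H5F/H5G calls. A sketch of reading a single weight dataset with that stack (the example class, the path, the layer group "dense", and the dataset name "dense/kernel:0" are assumptions about the file's contents):

    using System;
    using HDF.PInvoke;
    using HDF5CSharp;

    public static class ReadKernelExample
    {
        public static void Run()
        {
            long f = Hdf5.OpenFile("weights.h5", true);   // illustrative path
            long g = H5G.open(f, "dense");                // assumed layer group
            // Datasets come back as a System.Array of the requested element type.
            (bool success, Array kernel) = Hdf5.ReadDataset<float>(g, "dense/kernel:0");
            if (success)
                Console.WriteLine($"rank {kernel.Rank}, {kernel.Length} elements");
            H5G.close(g);
            Hdf5.CloseFile(f);
        }
    }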