
add Tensor overload to GradientDescentOptimizer #422

tags/v0.12
Oceania2018 committed 6 years ago
commit 41cb39e35b
2 changed files with 11 additions and 4 deletions
  1. src/TensorFlowNET.Core/TensorFlowNET.Core.csproj  (+5, -4)
  2. src/TensorFlowNET.Core/Train/GradientDescentOptimizer.cs  (+6, -0)

src/TensorFlowNET.Core/TensorFlowNET.Core.csproj  (+5, -4)

@@ -5,7 +5,7 @@
   <AssemblyName>TensorFlow.NET</AssemblyName>
   <RootNamespace>Tensorflow</RootNamespace>
   <TargetTensorFlow>1.14.0</TargetTensorFlow>
-  <Version>0.11.6</Version>
+  <Version>0.11.7</Version>
   <Authors>Haiping Chen, Meinrad Recheis, Eli Belash</Authors>
   <Company>SciSharp STACK</Company>
   <GeneratePackageOnBuild>true</GeneratePackageOnBuild>
@@ -17,7 +17,7 @@
   <PackageTags>TensorFlow, NumSharp, SciSharp, MachineLearning, TensorFlow.NET, C#</PackageTags>
   <Description>Google's TensorFlow full binding in .NET Standard.
 Docs: https://tensorflownet.readthedocs.io</Description>
-  <AssemblyVersion>0.11.6.0</AssemblyVersion>
+  <AssemblyVersion>0.11.7.0</AssemblyVersion>
   <PackageReleaseNotes>Changes since v0.10.0:
 1. Upgrade NumSharp to v0.20.3.
 2. Add DisposableObject class to manage object lifetime.
@@ -31,9 +31,10 @@ Docs: https://tensorflownet.readthedocs.io</Description>
 10. Support n-dim indexing for tensor.
 11. Add RegisterNoGradients
 12. Add CumsumGrad, BroadcastToGrad.
-13. Return VariableV1 instead of RefVariable.</PackageReleaseNotes>
+13. Return VariableV1 instead of RefVariable.
+14.</PackageReleaseNotes>
   <LangVersion>7.3</LangVersion>
-  <FileVersion>0.11.6.0</FileVersion>
+  <FileVersion>0.11.7.0</FileVersion>
   <PackageLicenseFile>LICENSE</PackageLicenseFile>
   <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance>
   <SignAssembly>true</SignAssembly>


src/TensorFlowNET.Core/Train/GradientDescentOptimizer.cs  (+6, -0)

@@ -39,6 +39,12 @@ namespace Tensorflow.Train
             : base(learning_rate, use_locking, name)
         {
             _lr = learning_rate;
+        }
+
+        public GradientDescentOptimizer(Tensor learning_rate, bool use_locking = false, string name = "GradientDescent")
+            : base(learning_rate, use_locking, name)
+        {
+            _lr_t = learning_rate;
         }
 
         public override void _prepare()
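
For context, a minimal usage sketch of the new overload (not part of the commit): a learning rate built as a Tensor is handed to the new constructor, alongside the existing float overload. The tf.constant helper and the static Binding import are assumptions about the TensorFlow.NET API surface around this version, not code taken from the repository.

using Tensorflow;
using Tensorflow.Train;
using static Tensorflow.Binding;   // assumed entry point exposing the `tf` helper

class LearningRateTensorExample
{
    static void Main()
    {
        // New in this commit: the constructor also accepts a Tensor learning rate,
        // so a graph-computed value (e.g. a decay schedule) can be passed directly.
        Tensor lr = tf.constant(0.01f);
        var optimizer = new GradientDescentOptimizer(lr);

        // The original float overload keeps working unchanged.
        var optimizer2 = new GradientDescentOptimizer(0.01f);
    }
}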

