using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.Engine;
namespace Tensorflow.Keras
{
    public interface IOptimizerApi
    {
        /// <summary>
        /// Adam optimization is a stochastic gradient descent method that is based on
        /// adaptive estimation of first-order and second-order moments.
        /// </summary>
        /// <param name="learning_rate">The learning rate. Defaults to 0.001.</param>
        /// <param name="beta_1">The exponential decay rate for the 1st moment estimates. Defaults to 0.9.</param>
        /// <param name="beta_2">The exponential decay rate for the 2nd moment estimates. Defaults to 0.999.</param>
        /// <param name="epsilon">A small constant for numerical stability. Defaults to 1e-7.</param>
        /// <param name="amsgrad">Whether to apply the AMSGrad variant of this algorithm from the paper "On the Convergence of Adam and Beyond". Defaults to false.</param>
        /// <param name="name">Optional name for the operations created when applying gradients. Defaults to "Adam".</param>
        /// <returns>An <see cref="IOptimizer"/> configured with the given hyperparameters.</returns>
        IOptimizer Adam(float learning_rate = 0.001f,
            float beta_1 = 0.9f,
            float beta_2 = 0.999f,
            float epsilon = 1e-7f,
            bool amsgrad = false,
            string name = "Adam");
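
        // For reference, a sketch of the standard Adam update (Kingma & Ba, 2014)
        // that implementations of this method are expected to follow; g is the
        // gradient of parameter w at step t:
        //   m_t   = beta_1 * m_{t-1} + (1 - beta_1) * g
        //   v_t   = beta_2 * v_{t-1} + (1 - beta_2) * g * g
        //   m_hat = m_t / (1 - beta_1^t);  v_hat = v_t / (1 - beta_2^t)
        //   w     = w - learning_rate * m_hat / (sqrt(v_hat) + epsilon)
        // With amsgrad = true, the running maximum of v_hat over past steps is
        // used in place of v_hat.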

        /// <summary>
        /// Construct a new RMSprop optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate. Defaults to 0.001.</param>
        /// <param name="rho">Discounting factor for the history/coming gradient. Defaults to 0.9.</param>
        /// <param name="momentum">The momentum factor. Defaults to 0.0.</param>
        /// <param name="epsilon">A small constant for numerical stability. Defaults to 1e-7.</param>
        /// <param name="centered">If true, gradients are normalized by the estimated variance of the gradient; if false, by the uncentered second moment. Setting this to true may help with training, but is slightly more expensive. Defaults to false.</param>
        /// <param name="name">Optional name prefix for the operations created when applying gradients. Defaults to "RMSprop".</param>
        /// <returns>An <see cref="IOptimizer"/> configured with the given hyperparameters.</returns>
        IOptimizer RMSprop(float learning_rate = 0.001f,
            float rho = 0.9f,
            float momentum = 0.0f,
            float epsilon = 1e-7f,
            bool centered = false,
            string name = "RMSprop");
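
        // For reference, a sketch of the conventional (non-centered) RMSprop update;
        // g is the gradient of parameter w, ms its running mean square, mom the
        // momentum accumulator:
        //   ms  = rho * ms + (1 - rho) * g * g
        //   mom = momentum * mom + learning_rate * g / sqrt(ms + epsilon)
        //   w   = w - mom
        // With centered = true, a running mean of g is also maintained and its
        // square is subtracted from ms before the square root, so the step is
        // normalized by an estimate of the gradient's variance.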

        /// <summary>
        /// Construct a new stochastic gradient descent optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate.</param>
        /// <returns>An <see cref="IOptimizer"/> configured with the given learning rate.</returns>
        IOptimizer SGD(float learning_rate);
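
        // Plain SGD steps each parameter w directly against its gradient g:
        //   w = w - learning_rate * g
        //
        // Illustrative usage (a sketch; assumes an entry point such as
        // TensorFlow.NET's `keras.optimizers` exposing this interface):
        //   var adam = keras.optimizers.Adam(learning_rate: 0.001f);
        //   var sgd  = keras.optimizers.SGD(0.01f);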
    }
}