using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
namespace Tensorflow.Keras.Optimizers
{
    public class OptimizerApi : IOptimizerApi
    {
        /// <summary>
        /// Adam optimization is a stochastic gradient descent method that is based on
        /// adaptive estimation of first-order and second-order moments.
        /// </summary>
        /// <param name="learning_rate">The learning rate. Defaults to 0.001.</param>
        /// <param name="beta_1">The exponential decay rate for the first-moment estimates. Defaults to 0.9.</param>
        /// <param name="beta_2">The exponential decay rate for the second-moment estimates. Defaults to 0.999.</param>
        /// <param name="epsilon">A small constant for numerical stability. Defaults to 1e-7.</param>
        /// <param name="amsgrad">Whether to apply the AMSGrad variant of the algorithm. Defaults to false.</param>
        /// <param name="name">Optional name for the operations created when applying gradients. Defaults to "Adam".</param>
        /// <returns>A new <see cref="IOptimizer"/> configured for Adam optimization.</returns>
        public IOptimizer Adam(float learning_rate = 0.001f,
            float beta_1 = 0.9f,
            float beta_2 = 0.999f,
            float epsilon = 1e-7f,
            bool amsgrad = false,
            string name = "Adam")
            => new Adam(learning_rate: learning_rate,
                beta_1: beta_1,
                beta_2: beta_2,
                epsilon: epsilon,
                amsgrad: amsgrad,
                name: name);
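
        // Usage sketch (assumes the TensorFlow.NET KerasApi surface, i.e. that `keras.optimizers`
        // resolves to this class and that Model.compile accepts an IOptimizer):
        //
        //     var optimizer = keras.optimizers.Adam(learning_rate: 0.001f, amsgrad: true);
        //     model.compile(optimizer: optimizer,
        //         loss: keras.losses.SparseCategoricalCrossentropy(from_logits: true),
        //         metrics: new[] { "accuracy" });
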
        /// <summary>
        /// Construct a new RMSprop optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate. Defaults to 0.001.</param>
        /// <param name="rho">Discounting factor for the history of gradients. Defaults to 0.9.</param>
        /// <param name="momentum">Momentum factor applied to the updates. Defaults to 0.0.</param>
        /// <param name="epsilon">A small constant for numerical stability. Defaults to 1e-7.</param>
        /// <param name="centered">If true, gradients are normalized by the estimated variance of the gradient; if false, by the uncentered second moment. Defaults to false.</param>
        /// <param name="name">Optional name prefix for the operations created when applying gradients. Defaults to "RMSprop".</param>
        /// <returns>A new <see cref="IOptimizer"/> configured for RMSprop optimization.</returns>
        public IOptimizer RMSprop(float learning_rate = 0.001f,
            float rho = 0.9f,
            float momentum = 0.0f,
            float epsilon = 1e-7f,
            bool centered = false,
            string name = "RMSprop")
            => new RMSprop(new RMSpropArgs
            {
                LearningRate = learning_rate,
                RHO = rho,
                Momentum = momentum,
                Epsilon = epsilon,
                Centered = centered,
                Name = name
            });
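
        // Usage sketch (assumes `keras.optimizers` resolves to this class): the named
        // arguments above are copied one-to-one into RMSpropArgs, so a centered RMSprop
        // optimizer can be requested as:
        //
        //     var rmsprop = keras.optimizers.RMSprop(learning_rate: 0.01f,
        //         rho: 0.95f,
        //         centered: true);
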
        /// <summary>
        /// Construct a new stochastic gradient descent (SGD) optimizer.
        /// </summary>
        /// <param name="learning_rate">The learning rate.</param>
        /// <returns>A new <see cref="IOptimizer"/> configured for SGD.</returns>
        public IOptimizer SGD(float learning_rate)
            => new SGD(learning_rate);
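
        // Usage sketch (assumes `keras.optimizers` resolves to this class): this overload
        // only exposes the learning rate:
        //
        //     var sgd = keras.optimizers.SGD(learning_rate: 0.01f);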
    }
}