using NumSharp;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;
namespace Tensorflow.Keras.Layers
{
public class LayersApi
{
/// <summary>
/// Functional interface for the batch normalization layer.
/// http://arxiv.org/abs/1502.03167
/// </summary>
/// <param name="inputs">Tensor input.</param>
/// <param name="axis">Integer, the axis that should be normalized (typically the features axis).</param>
/// <param name="momentum">Momentum for the moving average.</param>
/// <param name="epsilon">Small float added to variance to avoid dividing by zero.</param>
/// <param name="center">If true, add offset of `beta` to the normalized tensor. If false, `beta` is ignored.</param>
/// <param name="scale">If true, multiply by `gamma`. If false, `gamma` is not used.</param>
/// <param name="beta_initializer">Initializer for the beta weight.</param>
/// <param name="gamma_initializer">Initializer for the gamma weight.</param>
/// <param name="moving_mean_initializer">Initializer for the moving mean.</param>
/// <param name="moving_variance_initializer">Initializer for the moving variance.</param>
/// <param name="training">Tensor or boolean indicating whether to normalize with the statistics of the current batch (training) or with the moving statistics (inference).</param>
/// <param name="trainable">Boolean, if true the variables will be marked as trainable.</param>
/// <param name="name">Layer name.</param>
/// <param name="renorm">Whether to use Batch Renormalization.</param>
/// <param name="renorm_momentum">Momentum used to update the moving means and standard deviations when renorm is enabled.</param>
/// <returns>Output tensor(s) of the layer.</returns>
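/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers` and that the `Tensors` returned by `Input` converts implicitly to `Tensor`):
/// <code>
/// var inputs = keras.layers.Input(shape: (28, 28, 3));
/// var normalized = keras.layers.batch_normalization(inputs, momentum: 0.9f, epsilon: 1e-3f);
/// </code>
/// </example>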
public Tensors batch_normalization(Tensor inputs,
int axis = -1,
float momentum = 0.99f,
float epsilon = 0.001f,
bool center = true,
bool scale = true,
IInitializer beta_initializer = null,
IInitializer gamma_initializer = null,
IInitializer moving_mean_initializer = null,
IInitializer moving_variance_initializer = null,
Tensor training = null,
bool trainable = true,
string name = null,
bool renorm = false,
float renorm_momentum = 0.99f)
{
var layer = new BatchNormalization(new BatchNormalizationArgs
{
Axis = axis,
Momentum = momentum,
Epsilon = epsilon,
Center = center,
Scale = scale,
BetaInitializer = beta_initializer,
GammaInitializer = gamma_initializer,
MovingMeanInitializer = moving_mean_initializer,
MovingVarianceInitializer = moving_variance_initializer,
Renorm = renorm,
RenormMomentum = renorm_momentum,
Trainable = trainable,
Name = name
});
return layer.Apply(inputs);
}
/// <summary>
/// 2D convolution layer (e.g. spatial convolution over images).
/// </summary>
/// <param name="filters">Integer, the dimensionality of the output space (i.e. the number of output filters in the convolution).</param>
/// <param name="kernel_size">An integer or tuple of 2 integers, specifying the height and width of the 2D convolution window.</param>
/// <param name="strides">An integer or tuple of 2 integers, specifying the strides of the convolution along the height and width.</param>
/// <param name="padding">One of `"valid"` or `"same"`.</param>
/// <param name="data_format">A string, one of `channels_last` or `channels_first`.</param>
/// <param name="dilation_rate">An integer or tuple of 2 integers, specifying the dilation rate to use for dilated convolution.</param>
/// <param name="groups">A positive integer specifying the number of groups in which the input is split along the channel axis.</param>
/// <param name="activation">Activation function to use. If nothing is specified, no activation is applied (see `tf.keras.activations`).</param>
/// <param name="use_bias">Boolean, whether the layer uses a bias vector.</param>
/// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
/// <param name="bias_initializer">Initializer for the bias vector.</param>
/// <param name="kernel_regularizer">Regularizer function applied to the kernel weights matrix.</param>
/// <param name="bias_regularizer">Regularizer function applied to the bias vector.</param>
/// <param name="activity_regularizer">Regularizer function applied to the output of the layer (its "activation").</param>
/// <returns>A `Conv2D` layer instance.</returns>
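/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var conv = keras.layers.Conv2D(32, kernel_size: (3, 3), padding: "same", activation: keras.activations.Relu);
/// var feature_map = conv.Apply(images);   // images: a hypothetical rank-4 tensor, e.g. (batch, 28, 28, 1)
/// </code>
/// </example>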
public Conv2D Conv2D(int filters,
TensorShape kernel_size = null,
TensorShape strides = null,
string padding = "valid",
string data_format = null,
TensorShape dilation_rate = null,
int groups = 1,
Activation activation = null,
bool use_bias = true,
IInitializer kernel_initializer = null,
IInitializer bias_initializer = null,
IRegularizer kernel_regularizer = null,
IRegularizer bias_regularizer = null,
IRegularizer activity_regularizer = null)
=> new Conv2D(new Conv2DArgs
{
Rank = 2,
Filters = filters,
KernelSize = kernel_size,
Strides = strides == null ? (1, 1) : strides,
Padding = padding,
DataFormat = data_format,
DilationRate = dilation_rate == null ? (1, 1) : dilation_rate,
Groups = groups,
UseBias = use_bias,
KernelRegularizer = kernel_regularizer,
KernelInitializer = kernel_initializer == null ? tf.glorot_uniform_initializer : kernel_initializer,
BiasInitializer = bias_initializer == null ? tf.zeros_initializer : bias_initializer,
BiasRegularizer = bias_regularizer,
ActivityRegularizer = activity_regularizer,
Activation = activation ?? keras.activations.Linear
});
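/// <summary>
/// Functional interface for the 2D convolution layer: builds a `Conv2D` layer and immediately applies it to `inputs`.
/// </summary>
/// <param name="inputs">Tensor input.</param>
/// <param name="filters">Integer, the dimensionality of the output space.</param>
/// <param name="kernel_size">An array of 2 integers specifying the height and width of the convolution window.</param>
/// <returns>Output tensor.</returns>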
public Tensor conv2d(Tensor inputs,
int filters,
int[] kernel_size,
int[] strides = null,
string padding = "valid",
string data_format = "channels_last",
int[] dilation_rate = null,
bool use_bias = true,
Activation activation = null,
IInitializer kernel_initializer = null,
IInitializer bias_initializer = null,
bool trainable = true,
string name = null)
{
if (strides == null)
strides = new int[] { 1, 1 };
if (dilation_rate == null)
dilation_rate = new int[] { 1, 1 };
if (bias_initializer == null)
bias_initializer = tf.zeros_initializer;
var layer = new Conv2D(new Conv2DArgs
{
Filters = filters,
KernelSize = kernel_size,
Strides = strides,
Padding = padding,
DataFormat = data_format,
DilationRate = dilation_rate,
Activation = activation,
UseBias = use_bias,
KernelInitializer = kernel_initializer,
BiasInitializer = bias_initializer,
Trainable = trainable,
Name = name
});
return layer.Apply(inputs);
}
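/// <summary>
/// Just your regular densely-connected NN layer: `output = activation(dot(input, kernel) + bias)`.
/// </summary>
/// <param name="units">Positive integer, dimensionality of the output space.</param>
/// <param name="activation">Activation function to use; if null, a linear activation is applied.</param>
/// <param name="input_shape">Shape of the input, not including the batch axis.</param>
/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var dense = keras.layers.Dense(64, activation: keras.activations.Relu);
/// var hidden = dense.Apply(features);   // features: a hypothetical 2-D input tensor
/// </code>
/// </example>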
public Dense Dense(int units,
Activation activation = null,
TensorShape input_shape = null)
=> new Dense(new DenseArgs
{
Units = units,
Activation = activation ?? keras.activations.Linear,
InputShape = input_shape
});
/// <summary>
/// Functional interface for the densely-connected (fully-connected) layer:
/// `outputs = activation(inputs * kernel + bias)`
/// </summary>
/// <param name="inputs">Tensor input.</param>
/// <param name="units">Integer, dimensionality of the output space.</param>
/// <param name="activation">Activation function to use; if null, a linear activation is applied.</param>
/// <param name="use_bias">Boolean, whether the layer uses a bias.</param>
/// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
/// <param name="bias_initializer">Initializer for the bias vector.</param>
/// <param name="trainable">Boolean, if true the variables will be marked as trainable.</param>
/// <param name="name">Layer name.</param>
/// <param name="reuse">Whether to reuse the weights of a previous layer of the same name.</param>
/// <returns>Output tensor.</returns>
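/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var hidden = keras.layers.dense(features, 128, activation: keras.activations.Relu);   // features: a hypothetical input tensor
/// </code>
/// </example>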
public Tensor dense(Tensor inputs,
int units,
Activation activation = null,
bool use_bias = true,
IInitializer kernel_initializer = null,
IInitializer bias_initializer = null,
bool trainable = true,
string name = null,
bool? reuse = null)
{
if (bias_initializer == null)
bias_initializer = tf.zeros_initializer;
var layer = new Dense(new DenseArgs
{
Units = units,
Activation = activation,
UseBias = use_bias,
BiasInitializer = bias_initializer,
KernelInitializer = kernel_initializer,
Trainable = trainable,
Name = name
});
return layer.Apply(inputs);
}
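/// <summary>
/// Applies Dropout to the input: randomly sets input units to 0 with frequency `rate` at each step during training, which helps prevent overfitting.
/// </summary>
/// <param name="rate">Float between 0 and 1. Fraction of the input units to drop.</param>
/// <param name="noise_shape">1D shape of the binary dropout mask that will be multiplied with the input.</param>
/// <param name="seed">An integer to use as random seed.</param>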
public Dropout Dropout(float rate, TensorShape noise_shape = null, int? seed = null)
=> new Dropout(new DropoutArgs
{
Rate = rate,
NoiseShape = noise_shape,
Seed = seed
});
/// <summary>
/// Turns positive integers (indexes) into dense vectors of fixed size.
/// This layer can only be used as the first layer in a model.
/// e.g. [[4], [20]] -> [[0.25, 0.1], [0.6, -0.2]]
/// https://www.tensorflow.org/api_docs/python/tf/keras/layers/Embedding
/// </summary>
/// <param name="input_dim">Size of the vocabulary, i.e. maximum integer index + 1.</param>
/// <param name="output_dim">Dimension of the dense embedding.</param>
/// <param name="embeddings_initializer">Initializer for the embeddings matrix (see keras.initializers).</param>
/// <param name="mask_zero">Whether or not the input value 0 is a special "padding" value that should be masked out.</param>
/// <param name="input_length">Length of input sequences, when it is constant.</param>
/// <returns>An `Embedding` layer instance.</returns>
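/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var embedding = keras.layers.Embedding(input_dim: 1000, output_dim: 64, input_length: 10);
/// var vectors = embedding.Apply(token_ids);   // token_ids: a hypothetical integer tensor of word indexes
/// </code>
/// </example>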
public Embedding Embedding(int input_dim,
int output_dim,
IInitializer embeddings_initializer = null,
bool mask_zero = false,
TensorShape input_shape = null,
int input_length = -1)
=> new Embedding(new EmbeddingArgs
{
InputDim = input_dim,
OutputDim = output_dim,
MaskZero = mask_zero,
InputShape = input_shape ?? input_length,
InputLength = input_length,
EmbeddingsInitializer = embeddings_initializer
});
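/// <summary>
/// Flattens the input. Does not affect the batch size.
/// </summary>
/// <param name="data_format">A string, one of `channels_last` or `channels_first`.</param>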
public Flatten Flatten(string data_format = null)
=> new Flatten(new FlattenArgs
{
DataFormat = data_format
});
/// <summary>
/// `Input()` is used to instantiate a Keras tensor.
/// </summary>
/// <param name="shape">A shape tuple not including the batch size.</param>
/// <param name="name">An optional name string for the layer.</param>
/// <param name="sparse">A boolean specifying whether the placeholder to be created is sparse.</param>
/// <param name="ragged">A boolean specifying whether the placeholder to be created is ragged.</param>
/// <returns>The symbolic output(s) of the input layer.</returns>
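/// <example>
/// A minimal usage sketch of the functional API (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var img = keras.layers.Input(shape: (32, 32, 3), name: "img");
/// </code>
/// </example>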
public Tensors Input(TensorShape shape,
string name = null,
bool sparse = false,
bool ragged = false)
{
var input_layer = new InputLayer(new InputLayerArgs
{
InputShape = shape,
Name = name,
Sparse = sparse,
Ragged = ragged
});
return input_layer.InboundNodes[0].Outputs;
}
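/// <summary>
/// Max pooling operation for 2D spatial data: downsamples the input along its spatial dimensions by taking the maximum over a window of size `pool_size`.
/// </summary>
/// <param name="pool_size">Window size over which to take the maximum; defaults to (2, 2).</param>
/// <param name="strides">How far the pooling window moves for each pooling step.</param>
/// <param name="padding">One of `"valid"` or `"same"`.</param>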
public MaxPooling2D MaxPooling2D(TensorShape pool_size = null,
TensorShape strides = null,
string padding = "valid")
=> new MaxPooling2D(new MaxPooling2DArgs
{
PoolSize = pool_size ?? (2, 2),
Strides = strides,
Padding = padding
});
/// <summary>
/// Max pooling layer for 2D inputs (e.g. images).
/// </summary>
/// <param name="inputs">The tensor over which to pool. Must have rank 4.</param>
/// <param name="pool_size">An array of 2 integers: (pool_height, pool_width) specifying the size of the pooling window.</param>
/// <param name="strides">An array of 2 integers specifying the strides of the pooling operation.</param>
/// <param name="padding">A string, either `"valid"` or `"same"`.</param>
/// <param name="data_format">A string, one of `channels_last` or `channels_first`.</param>
/// <param name="name">A name for the layer.</param>
/// <returns>Output tensor.</returns>
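/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var pooled = keras.layers.max_pooling2d(conv_out, pool_size: new[] { 2, 2 }, strides: new[] { 2, 2 });   // conv_out: a hypothetical rank-4 tensor
/// </code>
/// </example>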
public Tensor max_pooling2d(Tensor inputs,
int[] pool_size,
int[] strides,
string padding = "valid",
string data_format = "channels_last",
string name = null)
{
var layer = new MaxPooling2D(new MaxPooling2DArgs
{
PoolSize = pool_size,
Strides = strides,
Padding = padding,
DataFormat = data_format,
Name = name
});
return layer.Apply(inputs);
}
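/// <summary>
/// Long Short-Term Memory layer - Hochreiter 1997.
/// </summary>
/// <param name="units">Positive integer, dimensionality of the output space.</param>
/// <param name="return_sequences">Whether to return the full output sequence or only the last output.</param>
/// <param name="return_state">Whether to return the last state in addition to the output.</param>
/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers`):
/// <code>
/// var lstm = keras.layers.LSTM(64, return_sequences: true);
/// </code>
/// </example>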
public Layer LSTM(int units,
Activation activation = null,
Activation recurrent_activation = null,
bool use_bias = true,
IInitializer kernel_initializer = null,
IInitializer recurrent_initializer = null,
IInitializer bias_initializer = null,
bool unit_forget_bias = true,
float dropout = 0f,
float recurrent_dropout = 0f,
int implementation = 2,
bool return_sequences = false,
bool return_state = false,
bool go_backwards = false,
bool stateful = false,
bool time_major = false,
bool unroll = false)
=> new LSTM(new LSTMArgs
{
Units = units,
Activation = activation ?? keras.activations.Tanh,
RecurrentActivation = recurrent_activation ?? keras.activations.Sigmoid,
KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
RecurrentInitializer = recurrent_initializer ?? tf.orthogonal_initializer,
BiasInitializer = bias_initializer ?? tf.zeros_initializer,
Dropout = dropout,
RecurrentDropout = recurrent_dropout,
Implementation = implementation,
ReturnSequences = return_sequences,
ReturnState = return_state,
GoBackwards = go_backwards,
Stateful = stateful,
TimeMajor = time_major,
Unroll = unroll
});
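/// <summary>
/// A preprocessing layer that rescales input values to a new range: `output = input * scale + offset`.
/// </summary>
/// <param name="scale">Float, the scale to apply to the inputs.</param>
/// <param name="offset">Float, the offset to apply to the inputs.</param>
/// <param name="input_shape">Shape of the input, not including the batch axis.</param>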
public Rescaling Rescaling(float scale,
float offset = 0,
TensorShape input_shape = null)
=> new Rescaling(new RescalingArgs
{
Scale = scale,
Offset = offset,
InputShape = input_shape
});
/// <summary>
/// Zero-padding layer for 2D input (e.g. picture).
/// </summary>
/// <param name="padding">Int, pair of ints, or pair of pairs of ints describing how many rows/columns of zeros to add at the top, bottom, left and right of the image.</param>
/// <returns>A `ZeroPadding2D` layer instance.</returns>
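/// <example>
/// A minimal usage sketch (illustrative only; assumes this LayersApi instance is exposed as `keras.layers` and that NumSharp's `np.array` builds the padding NDArray):
/// <code>
/// var pad = keras.layers.ZeroPadding2D(np.array(new[,] { { 1, 1 }, { 2, 2 } }));   // pad 1 row top/bottom, 2 columns left/right
/// </code>
/// </example>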
public ZeroPadding2D ZeroPadding2D(NDArray padding)
=> new ZeroPadding2D(new ZeroPadding2DArgs
{
Padding = padding
});
Activation GetActivationByName(string name)
=> name switch
{
"linear" => keras.activations.Linear,
"relu" => keras.activations.Relu,
"sigmoid" => keras.activations.Sigmoid,
"tanh" => keras.activations.Tanh,
// fail loudly on an unknown name instead of silently falling back to a linear activation
_ => throw new Exception($"Activation '{name}' not found.")
};
}
}