
Merge abe33a3829 into 0454c7b068

pull/1094/merge
Beacontownfc committed 2 years ago
commit a09bb80bdc
3 changed files with 46 additions and 0 deletions:
  1. src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+9 -0)
  2. src/TensorFlowNET.Keras/Layers/Core/Dense.cs (+2 -0)
  3. src/TensorFlowNET.Keras/Layers/LayersApi.cs (+35 -0)

src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+9 -0)

@@ -105,6 +105,15 @@ namespace Tensorflow.Keras.Layers
         bool use_bias = true,
         IInitializer bias_initializer = null,
         Shape input_shape = null);

+    public ILayer Dense(int units,
+        string activation = null,
+        IInitializer kernel_initializer = null,
+        bool use_bias = true,
+        IInitializer bias_initializer = null,
+        IRegularizer kernel_regularizer = null,
+        IRegularizer bias_regularizer = null,
+        Shape input_shape = null);
+
     public ILayer Dropout(float rate, Shape noise_shape = null, int? seed = null);



src/TensorFlowNET.Keras/Layers/Core/Dense.cs (+2 -0)

@@ -57,6 +57,7 @@ namespace Tensorflow.Keras.Layers
                 "kernel",
                 shape: new Shape(last_dim, args.Units),
                 initializer: args.KernelInitializer,
+                regularizer: args.KernelRegularizer,
                 dtype: DType,
                 trainable: true);
             if (args.UseBias)
@@ -64,6 +65,7 @@ namespace Tensorflow.Keras.Layers
                 "bias",
                 shape: new Shape(args.Units),
                 initializer: args.BiasInitializer,
+                regularizer: args.BiasRegularizer,
                 dtype: DType,
                 trainable: true);
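
The two hunks above thread args.KernelRegularizer and args.BiasRegularizer into add_weight, which is where Keras-style layers register a weight's penalty so it is collected into the model's total loss. As a rough illustration of what such a regularizer computes, here is a minimal hypothetical L2 penalty. It assumes TensorFlow.NET's IRegularizer contract is a single Apply(RegularizerArgs) method returning the penalty tensor, with the weight available as args.X; verify against the actual interface before relying on it.

using Tensorflow;
using Tensorflow.Keras;
using static Tensorflow.Binding;

// Hypothetical sketch of an L2 regularizer that could be passed as
// kernel_regularizer or bias_regularizer. The Apply(RegularizerArgs)
// signature and the args.X weight tensor are assumptions about the
// IRegularizer contract, not confirmed by this diff.
public class L2Sketch : IRegularizer
{
    readonly float l2;

    public L2Sketch(float l2 = 0.01f) => this.l2 = l2;

    // penalty = l2 * sum(w^2); the layer adds this to its losses.
    public Tensor Apply(RegularizerArgs args)
        => l2 * tf.reduce_sum(tf.square(args.X));
}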



src/TensorFlowNET.Keras/Layers/LayersApi.cs (+35 -0)

@@ -303,6 +303,41 @@ namespace Tensorflow.Keras.Layers
             Units = units,
             Activation = keras.activations.GetActivationFromName("linear")
         });

+    /// <summary>
+    /// Just your regular densely-connected NN layer.
+    ///
+    /// Dense implements the operation: output = activation(dot(input, kernel) + bias), where activation is the
+    /// element-wise activation function passed as the activation argument, kernel is a weights matrix created by
+    /// the layer, and bias is a bias vector created by the layer (only applicable if use_bias is true).
+    /// </summary>
+    /// <param name="units">Positive integer, dimensionality of the output space.</param>
+    /// <param name="activation">Activation function to use. If you don't specify anything, no activation is applied (i.e. "linear" activation: a(x) = x).</param>
+    /// <param name="kernel_initializer">Initializer for the kernel weights matrix.</param>
+    /// <param name="use_bias">Boolean, whether the layer uses a bias vector.</param>
+    /// <param name="bias_initializer">Initializer for the bias vector.</param>
+    /// <param name="kernel_regularizer">Regularizer that applies a penalty to the kernel weights matrix.</param>
+    /// <param name="bias_regularizer">Regularizer that applies a penalty to the bias vector.</param>
+    /// <param name="input_shape">N-D tensor with shape: (batch_size, ..., input_dim). The most common situation would be a 2D input with shape (batch_size, input_dim).</param>
+    /// <returns>N-D tensor with shape: (batch_size, ..., units). For instance, for a 2D input with shape (batch_size, input_dim), the output would have shape (batch_size, units).</returns>
+    public ILayer Dense(int units,
+        string activation = null,
+        IInitializer kernel_initializer = null,
+        bool use_bias = true,
+        IInitializer bias_initializer = null,
+        IRegularizer kernel_regularizer = null,
+        IRegularizer bias_regularizer = null,
+        Shape input_shape = null)
+        => new Dense(new DenseArgs
+        {
+            Units = units,
+            Activation = keras.activations.GetActivationFromName(activation),
+            KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
+            UseBias = use_bias,
+            BiasInitializer = bias_initializer ?? (use_bias ? tf.zeros_initializer : null),
+            InputShape = input_shape,
+            KernelRegularizer = kernel_regularizer,
+            BiasRegularizer = bias_regularizer
+        });

     /// <summary>
     /// Just your regular densely-connected NN layer.
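
With the new overload wired through ILayersApi, Dense, and LayersApi, a caller can attach penalties while building a model. A small usage sketch, reusing the hypothetical L2Sketch regularizer from above (any IRegularizer implementation would do); the Sequential/Input calls follow TensorFlow.NET's usual Keras surface:

using static Tensorflow.Binding;
using static Tensorflow.KerasApi;

var model = keras.Sequential();
model.add(keras.Input(shape: 784));

// Hidden layer with L2 penalties on both kernel and bias via the new overload.
model.add(keras.layers.Dense(64,
    activation: "relu",
    kernel_regularizer: new L2Sketch(0.01f),
    bias_regularizer: new L2Sketch(0.01f)));

// Output layer without regularization uses the existing overload.
model.add(keras.layers.Dense(10, activation: "softmax"));

The penalties are evaluated on the layer's weights and summed into the training loss alongside the data loss, which is what makes kernel_regularizer and bias_regularizer act as weight decay.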

