
correct ActivationAdaptor to ActivationAdapter

pull/1085/head
lingbai-kong, 2 years ago
commit aba6a076cc
4 changed files with 51 additions and 51 deletions:
  1. src/TensorFlowNET.Core/Keras/Activations/Activations.cs (+12 -12)
  2. src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+8 -8)
  3. src/TensorFlowNET.Keras/Activations.cs (+13 -13)
  4. src/TensorFlowNET.Keras/Layers/LayersApi.cs (+18 -18)

src/TensorFlowNET.Core/Keras/Activations/Activations.cs (+12 -12)

@@ -28,10 +28,10 @@ namespace Tensorflow.Keras
     }
 
     /// <summary>
-    /// The ActivationAdaptor is used to store string, Activation, and Func for Laysers Api to accept different types of activation parameters.
+    /// The ActivationAdapter stores a string, an Activation, or a Func so that the Layers API can accept different types of activation parameters.
     /// One of the properties must be specified while initializing.
     /// </summary>
-    public class ActivationAdaptor
+    public class ActivationAdapter
     {
         /// <summary>
         /// The name of the activation function, such as `tanh`, `sigmoid`.
@@ -48,34 +48,34 @@ namespace Tensorflow.Keras
         /// </summary>
         public Func<Tensor, string, Tensor>? Func { get; set; } = null;
 
-        public ActivationAdaptor(string name)
+        public ActivationAdapter(string name)
         {
             Name = name;
         }
 
-        public ActivationAdaptor(Activation activation)
+        public ActivationAdapter(Activation activation)
         {
             Activation = activation;
         }
 
-        public ActivationAdaptor(Func<Tensor, string, Tensor> func)
+        public ActivationAdapter(Func<Tensor, string, Tensor> func)
         {
             Func = func;
         }
 
-        public static implicit operator ActivationAdaptor(string name)
+        public static implicit operator ActivationAdapter(string name)
         {
-            return new ActivationAdaptor(name);
+            return new ActivationAdapter(name);
         }
 
-        public static implicit operator ActivationAdaptor(Activation activation)
+        public static implicit operator ActivationAdapter(Activation activation)
         {
-            return new ActivationAdaptor(activation);
+            return new ActivationAdapter(activation);
         }
 
-        public static implicit operator ActivationAdaptor(Func<Tensor, string, Tensor> func)
+        public static implicit operator ActivationAdapter(Func<Tensor, string, Tensor> func)
         {
-            return new ActivationAdaptor(func);
+            return new ActivationAdapter(func);
         }
     }
@@ -84,7 +84,7 @@ namespace Tensorflow.Keras
     {
         Activation GetActivationFromName(string name);
-        Activation GetActivationFromAdaptor(ActivationAdaptor adaptor);
+        Activation GetActivationFromAdapter(ActivationAdapter adapter);
 
         Activation Linear { get; }
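The three implicit operators above are what let callers hand a plain string, an existing Activation, or a delegate to anything that expects an ActivationAdapter. A minimal sketch of the conversions (not part of this commit; it assumes `using static Tensorflow.Binding;` and `using static Tensorflow.KerasApi;` for the `tf` and `keras` entry points):

    ActivationAdapter byName = "tanh";                          // implicit from string
    ActivationAdapter byActivation = keras.activations.Sigmoid; // implicit from Activation
    ActivationAdapter byFunc = new Func<Tensor, string, Tensor>(
        (x, name) => tf.nn.relu(x));                            // implicit from Func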



src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+8 -8)

@@ -48,7 +48,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = "channels_last",
             int dilation_rate = 1,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = "glorot_uniform",
             string bias_initializer = "zeros");
@@ -71,7 +71,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = null,
             Shape dilation_rate = null,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -99,7 +99,7 @@ namespace Tensorflow.Keras.Layers
             string output_padding = "valid",
             string data_format = null,
             Shape dilation_rate = null,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = null,
             string bias_initializer = null,
@@ -121,7 +121,7 @@ namespace Tensorflow.Keras.Layers
             string activity_regularizer = null);
 
         public ILayer Dense(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             bool use_bias = true,
             IInitializer bias_initializer = null,
@@ -155,7 +155,7 @@ namespace Tensorflow.Keras.Layers
         public ILayer EinsumDense(string equation,
             Shape output_shape,
             string bias_axes,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
             IRegularizer kernel_regularizer = null,
@@ -209,8 +209,8 @@ namespace Tensorflow.Keras.Layers
         public ILayer LeakyReLU(float alpha = 0.3f);
 
         public ILayer LSTM(int units,
-            ActivationAdaptor activation = null,
-            ActivationAdaptor recurrent_activation = null,
+            ActivationAdapter activation = null,
+            ActivationAdapter recurrent_activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer recurrent_initializer = null,
@@ -259,7 +259,7 @@ namespace Tensorflow.Keras.Layers
             Shape input_shape = null);
 
         public ILayer SimpleRNN(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             string kernel_initializer = "glorot_uniform",
             string recurrent_initializer = "orthogonal",
             string bias_initializer = "zeros",
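Because every adapter parameter in this interface defaults to null, omitting the argument keeps each layer's own default: GetActivationFromAdapter(null) resolves to linear, and the recurrent layers substitute tanh/sigmoid explicitly (see LayersApi.cs below). A hedged usage sketch against this interface:

    var dense = keras.layers.Dense(32);      // activation omitted -> linear
    var rnn   = keras.layers.SimpleRNN(32);  // activation omitted -> tanh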


src/TensorFlowNET.Keras/Activations.cs (+13 -13)

@@ -94,37 +94,37 @@ namespace Tensorflow.Keras
         }
 
         /// <summary>
-        /// Convert ActivationAdaptor to Activation.
-        /// If more than one properties of ActivationAdaptor are specified, the order of priority is `Name`, `Activation`, `Func`
+        /// Convert an ActivationAdapter to an Activation.
+        /// If more than one property of the ActivationAdapter is specified, the order of priority is `Name`, `Activation`, `Func`.
         /// </summary>
-        /// <param name="adaptor"></param>
+        /// <param name="adapter"></param>
         /// <returns></returns>
         /// <exception cref="Exception"></exception>
-        public Activation GetActivationFromAdaptor(ActivationAdaptor adaptor)
+        public Activation GetActivationFromAdapter(ActivationAdapter adapter)
         {
-            if(adaptor == null)
+            if(adapter == null)
             {
                 return _linear;
             }
-            if(adaptor.Name != null)
+            if(adapter.Name != null)
             {
-                return GetActivationFromName(adaptor.Name);
+                return GetActivationFromName(adapter.Name);
             }
-            else if(adaptor.Activation != null)
+            else if(adapter.Activation != null)
             {
-                return adaptor.Activation;
+                return adapter.Activation;
             }
-            else if(adaptor.Func != null)
+            else if(adapter.Func != null)
             {
                 return new Activation()
                 {
-                    Name = adaptor.Func.GetMethodInfo().Name,
-                    ActivationFunction = adaptor.Func
+                    Name = adapter.Func.GetMethodInfo().Name,
+                    ActivationFunction = adapter.Func
                 };
             }
             else
             {
-                throw new Exception("Could not interpret activation adaptor");
+                throw new Exception("Could not interpret activation adapter");
             }
         }
     }
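Given the priority order, an adapter with several properties set resolves by Name first, then Activation, then Func, and a null adapter falls back to linear. A short sketch of that behavior (variable names are illustrative, not from this commit):

    var adapter = new ActivationAdapter("relu")
    {
        Activation = keras.activations.Tanh  // ignored: Name takes priority
    };
    var resolved = keras.activations.GetActivationFromAdapter(adapter);  // the "relu" activation
    var fallback = keras.activations.GetActivationFromAdapter(null);     // linear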


src/TensorFlowNET.Keras/Layers/LayersApi.cs (+18 -18)

@@ -94,7 +94,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = "channels_last",
             int dilation_rate = 1,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = "glorot_uniform",
             string bias_initializer = "zeros")
@@ -109,7 +109,7 @@ namespace Tensorflow.Keras.Layers
                 DilationRate = dilation_rate,
                 Groups = groups,
                 UseBias = use_bias,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer)
             });
@@ -167,7 +167,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = null,
             Shape dilation_rate = null,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -190,7 +190,7 @@ namespace Tensorflow.Keras.Layers
                 BiasInitializer = bias_initializer == null ? tf.zeros_initializer : bias_initializer,
                 BiasRegularizer = bias_regularizer,
                 ActivityRegularizer = activity_regularizer,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
             });
         public ILayer Conv2D(int filters,
             Shape kernel_size = null,
@@ -248,7 +248,7 @@ namespace Tensorflow.Keras.Layers
             string output_padding = "valid",
             string data_format = null,
             Shape dilation_rate = null,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = null,
             string bias_initializer = null,
@@ -267,7 +267,7 @@ namespace Tensorflow.Keras.Layers
                 UseBias = use_bias,
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer),
-                Activation = keras.activations.GetActivationFromAdaptor(activation)
+                Activation = keras.activations.GetActivationFromAdapter(activation)
             });
         public ILayer Conv2DTranspose(int filters,
             Shape kernel_size = null,
@@ -317,7 +317,7 @@ namespace Tensorflow.Keras.Layers
         /// <param name="bias_constraint">Constraint function for the bias.</param>
         /// <returns>N-D tensor with shape: (batch_size, ..., units). For instance, for a 2D input with shape (batch_size, input_dim), the output would have shape (batch_size, units).</returns>
         public ILayer Dense(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             bool use_bias = true,
             IInitializer bias_initializer = null,
@@ -330,7 +330,7 @@ namespace Tensorflow.Keras.Layers
             => new Dense(new DenseArgs
             {
                 Units = units,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 BiasInitializer = bias_initializer ?? (use_bias ? tf.zeros_initializer : null),
                 InputShape = input_shape,
@@ -386,7 +386,7 @@ namespace Tensorflow.Keras.Layers
         /// <returns></returns>
         public Tensor dense(Tensor inputs,
             int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -405,7 +405,7 @@ namespace Tensorflow.Keras.Layers
             var layer = new Dense(new DenseArgs
             {
                 Units = units,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 UseBias = use_bias,
                 BiasInitializer = bias_initializer,
                 KernelInitializer = kernel_initializer,
@@ -460,7 +460,7 @@ namespace Tensorflow.Keras.Layers
         public ILayer EinsumDense(string equation,
             Shape output_shape,
             string bias_axes,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
             IRegularizer kernel_regularizer = null,
@@ -473,7 +473,7 @@ namespace Tensorflow.Keras.Layers
                 Equation = equation,
                 OutputShape = output_shape,
                 BiasAxes = bias_axes,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 BiasInitializer = bias_initializer ?? tf.zeros_initializer,
                 KernelRegularizer = kernel_regularizer,
@@ -807,7 +807,7 @@ namespace Tensorflow.Keras.Layers
         /// <param name="activation">The name of the activation function to use. Default: hyperbolic tangent (tanh).</param>
         /// <returns></returns>
         public ILayer SimpleRNN(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             string kernel_initializer = "glorot_uniform",
             string recurrent_initializer = "orthogonal",
             string bias_initializer = "zeros",
@@ -816,7 +816,7 @@ namespace Tensorflow.Keras.Layers
             => new SimpleRNN(new SimpleRNNArgs
             {
                 Units = units,
-                Activation = activation == null ? keras.activations.GetActivationFromAdaptor(activation): keras.activations.Tanh,
+                Activation = activation == null ? keras.activations.Tanh : keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 RecurrentInitializer = GetInitializerByName(recurrent_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer),
@@ -869,8 +869,8 @@ namespace Tensorflow.Keras.Layers
         /// </param>
         /// <returns></returns>
         public ILayer LSTM(int units,
-            ActivationAdaptor activation = null,
-            ActivationAdaptor recurrent_activation = null,
+            ActivationAdapter activation = null,
+            ActivationAdapter recurrent_activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer recurrent_initializer = null,
@@ -888,8 +888,8 @@ namespace Tensorflow.Keras.Layers
             => new LSTM(new LSTMArgs
             {
                 Units = units,
-                Activation = activation == null ? keras.activations.GetActivationFromAdaptor(activation) : keras.activations.Tanh,
-                RecurrentActivation = recurrent_activation == null ? keras.activations.GetActivationFromAdaptor(activation) : keras.activations.Sigmoid,
+                Activation = activation == null ? keras.activations.Tanh : keras.activations.GetActivationFromAdapter(activation),
+                RecurrentActivation = recurrent_activation == null ? keras.activations.Sigmoid : keras.activations.GetActivationFromAdapter(recurrent_activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 RecurrentInitializer = recurrent_initializer ?? tf.orthogonal_initializer,
                 BiasInitializer = bias_initializer ?? tf.zeros_initializer,
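Taken together, the rename means call sites can hand any of the three activation forms to a layer through one parameter. A usage sketch (layer sizes and the lambda are illustrative, not from this commit):

    var d1 = keras.layers.Dense(64, activation: "relu");                  // by name
    var d2 = keras.layers.Dense(64, activation: keras.activations.Tanh);  // by Activation
    var d3 = keras.layers.Dense(64, activation: new Func<Tensor, string, Tensor>(
        (x, name) => tf.tanh(x)));                                        // by custom Func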

