diff --git a/src/TensorFlowNET.Core/Keras/Activations/Activations.cs b/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
index ea01b319..e24e42ff 100644
--- a/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
+++ b/src/TensorFlowNET.Core/Keras/Activations/Activations.cs
@@ -28,10 +28,10 @@ namespace Tensorflow.Keras
     }
 
     /// <summary>
-    /// The ActivationAdaptor is used to store string, Activation, and Func for Laysers Api to accept different types of activation parameters.
+    /// The ActivationAdapter stores a string, an Activation, or a Func so that the Layers API can accept different types of activation parameters.
     /// One of the properties must be specified while initializing.
     /// </summary>
-    public class ActivationAdaptor
+    public class ActivationAdapter
     {
         /// <summary>
         /// The name of activaiton function, such as `tanh`, `sigmoid`.
         /// </summary>
@@ -48,34 +48,34 @@ namespace Tensorflow.Keras
         /// </summary>
         public Func<Tensor, string, Tensor>? Func { get; set; } = null;
 
-        public ActivationAdaptor(string name)
+        public ActivationAdapter(string name)
         {
             Name = name;
         }
 
-        public ActivationAdaptor(Activation activation)
+        public ActivationAdapter(Activation activation)
         {
             Activation = activation;
         }
 
-        public ActivationAdaptor(Func<Tensor, string, Tensor> func)
+        public ActivationAdapter(Func<Tensor, string, Tensor> func)
         {
             Func = func;
         }
 
-        public static implicit operator ActivationAdaptor(string name)
+        public static implicit operator ActivationAdapter(string name)
         {
-            return new ActivationAdaptor(name);
+            return new ActivationAdapter(name);
         }
 
-        public static implicit operator ActivationAdaptor(Activation activation)
+        public static implicit operator ActivationAdapter(Activation activation)
         {
-            return new ActivationAdaptor(activation);
+            return new ActivationAdapter(activation);
        }
 
-        public static implicit operator ActivationAdaptor(Func<Tensor, string, Tensor> func)
+        public static implicit operator ActivationAdapter(Func<Tensor, string, Tensor> func)
         {
-            return new ActivationAdaptor(func);
+            return new ActivationAdapter(func);
         }
     }
 
@@ -84,7 +84,7 @@ namespace Tensorflow.Keras
     {
         Activation GetActivationFromName(string name);
 
-        Activation GetActivationFromAdaptor(ActivationAdaptor adaptor);
+        Activation GetActivationFromAdapter(ActivationAdapter adapter);
 
         Activation Linear { get; }
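To make the adapter's intent concrete, here is a minimal usage sketch (not part of the patch). It assumes the usual `using static Tensorflow.Binding` / `using static Tensorflow.KerasApi` imports and that `Func` is `Func<Tensor, string, Tensor>`, matching the `ActivationFunction` assignment later in this diff. All three forms convert implicitly to `ActivationAdapter`:

```csharp
using System;
using Tensorflow;
using Tensorflow.Keras;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;

// A name is resolved later through GetActivationFromName.
ActivationAdapter byName = "tanh";

// An existing Activation instance is wrapped as-is.
ActivationAdapter byActivation = keras.activations.Sigmoid;

// A raw delegate; the lambda must be typed before the implicit operator applies.
Func<Tensor, string, Tensor> relu = (features, name) => tf.nn.relu(features);
ActivationAdapter byFunc = relu;
```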
diff --git a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
index abeac4e5..b17f635b 100644
--- a/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
+++ b/src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs
@@ -48,7 +48,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = "channels_last",
             int dilation_rate = 1,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = "glorot_uniform",
             string bias_initializer = "zeros");
@@ -71,7 +71,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = null,
             Shape dilation_rate = null,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -99,7 +99,7 @@ namespace Tensorflow.Keras.Layers
             string output_padding = "valid",
             string data_format = null,
             Shape dilation_rate = null,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = null,
             string bias_initializer = null,
@@ -121,7 +121,7 @@ namespace Tensorflow.Keras.Layers
             string activity_regularizer = null);
 
         public ILayer Dense(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             bool use_bias = true,
             IInitializer bias_initializer = null,
@@ -155,7 +155,7 @@ namespace Tensorflow.Keras.Layers
         public ILayer EinsumDense(string equation,
             Shape output_shape,
             string bias_axes,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
             IRegularizer kernel_regularizer = null,
@@ -209,8 +209,8 @@ namespace Tensorflow.Keras.Layers
         public ILayer LeakyReLU(float alpha = 0.3f);
 
         public ILayer LSTM(int units,
-            ActivationAdaptor activation = null,
-            ActivationAdaptor recurrent_activation = null,
+            ActivationAdapter activation = null,
+            ActivationAdapter recurrent_activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer recurrent_initializer = null,
@@ -259,7 +259,7 @@ namespace Tensorflow.Keras.Layers
             Shape input_shape = null);
 
         public ILayer SimpleRNN(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             string kernel_initializer = "glorot_uniform",
             string recurrent_initializer = "orthogonal",
             string bias_initializer = "zeros",
diff --git a/src/TensorFlowNET.Keras/Activations.cs b/src/TensorFlowNET.Keras/Activations.cs
index 4d08c77e..89988667 100644
--- a/src/TensorFlowNET.Keras/Activations.cs
+++ b/src/TensorFlowNET.Keras/Activations.cs
@@ -94,37 +94,37 @@ namespace Tensorflow.Keras
         }
 
         /// <summary>
-        /// Convert ActivationAdaptor to Activation.
-        /// If more than one properties of ActivationAdaptor are specified, the order of priority is `Name`, `Activation`, `Func`
+        /// Convert an ActivationAdapter to an Activation.
+        /// If more than one property of the ActivationAdapter is specified, the order of priority is `Name`, then `Activation`, then `Func`.
         /// </summary>
-        /// <param name="adaptor"></param>
+        /// <param name="adapter"></param>
         /// <returns></returns>
-        public Activation GetActivationFromAdaptor(ActivationAdaptor adaptor)
+        public Activation GetActivationFromAdapter(ActivationAdapter adapter)
         {
-            if(adaptor == null)
+            if(adapter == null)
             {
                 return _linear;
             }
-            if(adaptor.Name != null)
+            if(adapter.Name != null)
             {
-                return GetActivationFromName(adaptor.Name);
+                return GetActivationFromName(adapter.Name);
             }
-            else if(adaptor.Activation != null)
+            else if(adapter.Activation != null)
             {
-                return adaptor.Activation;
+                return adapter.Activation;
             }
-            else if(adaptor.Func != null)
+            else if(adapter.Func != null)
             {
                 return new Activation()
                 {
-                    Name = adaptor.Func.GetMethodInfo().Name,
-                    ActivationFunction = adaptor.Func
+                    Name = adapter.Func.GetMethodInfo().Name,
+                    ActivationFunction = adapter.Func
                 };
             }
             else
             {
-                throw new Exception("Could not interpret activation adaptor");
+                throw new Exception("Could not interpret activation adapter");
             }
         }
     }
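A short sketch of the resolution order implemented above, under the same assumptions as the previous example: `Name` outranks `Activation`, which outranks `Func`, and a null adapter falls back to the linear activation.

```csharp
var adapter = new ActivationAdapter("tanh")
{
    // Func is also set, but Name wins under the documented priority order.
    Func = (features, name) => tf.nn.relu(features)
};
Activation resolved = keras.activations.GetActivationFromAdapter(adapter);
Console.WriteLine(resolved.Name); // "tanh", not "relu"

// With no adapter at all, the call degrades to the linear activation.
Activation fallback = keras.activations.GetActivationFromAdapter(null);
```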
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index 776feb48..9349f50f 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -94,7 +94,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = "channels_last",
             int dilation_rate = 1,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = "glorot_uniform",
             string bias_initializer = "zeros")
@@ -109,7 +109,7 @@ namespace Tensorflow.Keras.Layers
                 DilationRate = dilation_rate,
                 Groups = groups,
                 UseBias = use_bias,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer)
             });
@@ -167,7 +167,7 @@ namespace Tensorflow.Keras.Layers
             string data_format = null,
             Shape dilation_rate = null,
             int groups = 1,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -190,7 +190,7 @@ namespace Tensorflow.Keras.Layers
                 BiasInitializer = bias_initializer == null ? tf.zeros_initializer : bias_initializer,
                 BiasRegularizer = bias_regularizer,
                 ActivityRegularizer = activity_regularizer,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
             });
 
         public ILayer Conv2D(int filters,
             Shape kernel_size = null,
@@ -248,7 +248,7 @@ namespace Tensorflow.Keras.Layers
             string output_padding = "valid",
             string data_format = null,
             Shape dilation_rate = null,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             string kernel_initializer = null,
             string bias_initializer = null,
@@ -267,7 +267,7 @@ namespace Tensorflow.Keras.Layers
                 UseBias = use_bias,
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer),
-                Activation = keras.activations.GetActivationFromAdaptor(activation)
+                Activation = keras.activations.GetActivationFromAdapter(activation)
             });
 
         public ILayer Conv2DTranspose(int filters,
             Shape kernel_size = null,
@@ -317,7 +317,7 @@ namespace Tensorflow.Keras.Layers
         /// <param name="bias_constraint">Constraint function for the bias.</param>
         /// <returns>N-D tensor with shape: (batch_size, ..., units). For instance, for a 2D input with shape (batch_size, input_dim), the output would have shape (batch_size, units).</returns>
         public ILayer Dense(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer = null,
             bool use_bias = true,
             IInitializer bias_initializer = null,
@@ -330,7 +330,7 @@ namespace Tensorflow.Keras.Layers
             => new Dense(new DenseArgs
             {
                 Units = units,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 BiasInitializer = bias_initializer ?? (use_bias ? tf.zeros_initializer : null),
                 InputShape = input_shape,
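Since the `Dense` overload above now takes an `ActivationAdapter`, any of the three forms binds directly at the call site. A hedged sketch, reusing the assumptions from the earlier examples:

```csharp
// String, Activation object, and delegate all bind to the same parameter.
var d1 = keras.layers.Dense(64, activation: "relu");
var d2 = keras.layers.Dense(64, activation: keras.activations.Sigmoid);

Func<Tensor, string, Tensor> identity = (features, name) => features;
var d3 = keras.layers.Dense(64, activation: identity);
```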
@@ -386,7 +386,7 @@ namespace Tensorflow.Keras.Layers
         /// </summary>
         public Tensor dense(Tensor inputs,
             int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer bias_initializer = null,
@@ -405,7 +405,7 @@ namespace Tensorflow.Keras.Layers
             var layer = new Dense(new DenseArgs
             {
                 Units = units,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 UseBias = use_bias,
                 BiasInitializer = bias_initializer,
                 KernelInitializer = kernel_initializer,
@@ -460,7 +460,7 @@ namespace Tensorflow.Keras.Layers
         public ILayer EinsumDense(string equation,
             Shape output_shape,
             string bias_axes,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             IInitializer kernel_initializer= null,
             IInitializer bias_initializer= null,
             IRegularizer kernel_regularizer= null,
@@ -473,7 +473,7 @@ namespace Tensorflow.Keras.Layers
                 Equation = equation,
                 OutputShape = output_shape,
                 BiasAxes = bias_axes,
-                Activation = keras.activations.GetActivationFromAdaptor(activation),
+                Activation = keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 BiasInitializer = bias_initializer ?? tf.zeros_initializer,
                 KernelRegularizer = kernel_regularizer,
@@ -807,7 +807,7 @@ namespace Tensorflow.Keras.Layers
         /// <param name="activation">The name of the activation function to use. Default: hyperbolic tangent (tanh).</param>
         /// <returns></returns>
         public ILayer SimpleRNN(int units,
-            ActivationAdaptor activation = null,
+            ActivationAdapter activation = null,
             string kernel_initializer = "glorot_uniform",
             string recurrent_initializer = "orthogonal",
             string bias_initializer = "zeros",
@@ -816,7 +816,7 @@ namespace Tensorflow.Keras.Layers
             => new SimpleRNN(new SimpleRNNArgs
             {
                 Units = units,
-                Activation = activation == null ? keras.activations.GetActivationFromAdaptor(activation): keras.activations.Tanh,
+                Activation = activation == null ? keras.activations.Tanh : keras.activations.GetActivationFromAdapter(activation),
                 KernelInitializer = GetInitializerByName(kernel_initializer),
                 RecurrentInitializer = GetInitializerByName(recurrent_initializer),
                 BiasInitializer = GetInitializerByName(bias_initializer),
@@ -869,8 +869,8 @@ namespace Tensorflow.Keras.Layers
         /// </summary>
         /// <returns></returns>
         public ILayer LSTM(int units,
-            ActivationAdaptor activation = null,
-            ActivationAdaptor recurrent_activation = null,
+            ActivationAdapter activation = null,
+            ActivationAdapter recurrent_activation = null,
             bool use_bias = true,
             IInitializer kernel_initializer = null,
             IInitializer recurrent_initializer = null,
@@ -888,8 +888,8 @@ namespace Tensorflow.Keras.Layers
             => new LSTM(new LSTMArgs
             {
                 Units = units,
-                Activation = activation == null ? keras.activations.GetActivationFromAdaptor(activation) : keras.activations.Tanh,
-                RecurrentActivation = recurrent_activation == null ? keras.activations.GetActivationFromAdaptor(activation) : keras.activations.Sigmoid,
+                Activation = activation == null ? keras.activations.Tanh : keras.activations.GetActivationFromAdapter(activation),
+                RecurrentActivation = recurrent_activation == null ? keras.activations.Sigmoid : keras.activations.GetActivationFromAdapter(recurrent_activation),
                 KernelInitializer = kernel_initializer ?? tf.glorot_uniform_initializer,
                 RecurrentInitializer = recurrent_initializer ?? tf.orthogonal_initializer,
                 BiasInitializer = bias_initializer ?? tf.zeros_initializer,
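As written, the recurrent layers keep their Keras defaults (tanh for the cell activation, sigmoid for the recurrent gate) when no adapter is supplied, and otherwise resolve whatever form the caller passes. A final usage sketch under the same assumptions as the earlier examples:

```csharp
var rnn = keras.layers.SimpleRNN(32);   // no adapter: defaults to tanh

var lstm = keras.layers.LSTM(64);       // no adapters: tanh cell, sigmoid gates

var custom = keras.layers.LSTM(64,
    activation: "relu",                                // string form
    recurrent_activation: keras.activations.Sigmoid);  // Activation form
```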