@@ -101,6 +101,8 @@ namespace Tensorflow
         name: name);

     public IActivation relu() => new relu();
     public IActivation swish() => new swish();
     public IActivation tanh() => new tanh();

@@ -111,6 +113,9 @@ namespace Tensorflow
     public Tensor relu(Tensor features, string name = null)
         => gen_nn_ops.relu(features, name);

+    public Tensor relu6(Tensor features, string name = null)
+        => gen_nn_ops.relu6(features, name);
+
     public Tensor[] fused_batch_norm(Tensor x,
         Tensor scale,
         Tensor offset,

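For context: Relu6 computes min(max(x, 0), 6), a standard ReLU whose output is capped at 6, commonly used in MobileNet-style, quantization-friendly networks. A minimal usage sketch of the new binding — hypothetical caller code, not part of the diff, assuming eager mode and the usual `using static Tensorflow.Binding;`:

    // Sketch: relu6 clamps each element to the range [0, 6].
    var x = tf.constant(new float[] { -3f, 2f, 9f });
    var y = tf.nn.relu6(x);   // expected values: [0, 2, 6]
    print(y);
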
@@ -32,6 +32,7 @@ namespace Tensorflow.Keras
     Activation Linear { get; }
     Activation Relu { get; }
+    Activation Relu6 { get; }
     Activation Sigmoid { get; }

@@ -180,6 +180,9 @@ namespace Tensorflow.Keras.Layers
     public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
     public ILayer LeakyReLU(float alpha = 0.3f);
+
+    public ILayer ReLU6();
+
     public IRnnCell LSTMCell(int uints,
         string activation = "tanh",
         string recurrent_activation = "sigmoid",

@@ -20,6 +20,11 @@ namespace Tensorflow.Keras
         Name = "relu",
         ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu", name, new ExecuteOpArgs(features))
     };
+    private static Activation _relu6 = new Activation()
+    {
+        Name = "relu6",
+        ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu6", name, new ExecuteOpArgs(features))
+    };
     private static Activation _sigmoid = new Activation()
     {
         Name = "sigmoid",

@@ -55,6 +60,7 @@ namespace Tensorflow.Keras
     _nameActivationMap = new Dictionary<string, Activation>();
     RegisterActivation(_relu);
+    RegisterActivation(_relu6);
     RegisterActivation(_linear);
     RegisterActivation(_sigmoid);
     RegisterActivation(_softmax);

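Registering `_relu6` in `_nameActivationMap` is what makes the string name "relu6" usable wherever layers accept an activation by name. A hedged sketch — the lookup helper name here is hypothetical and may differ in your version of the activations API:

    // Sketch: resolve the activation by its registered string name.
    // GetActivationFromName is an assumed lookup method over _nameActivationMap.
    var byName = keras.activations.GetActivationFromName("relu6");
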
@@ -65,6 +71,7 @@ namespace Tensorflow.Keras
     public Activation Linear => _linear;
     public Activation Relu => _relu;
+    public Activation Relu6 => _relu6;
     public Activation Sigmoid => _sigmoid;

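The new property exposes the activation directly, alongside the name-based path above. A sketch of direct use, relying only on members visible in this diff and assuming the registry is reachable as `keras.activations`:

    // Sketch: invoke the registered activation through the new public property.
    var relu6 = keras.activations.Relu6;
    var y = relu6.ActivationFunction(tf.constant(7.0f), null);  // runs the "Relu6" op -> 6.0
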
@@ -0,0 +1,25 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using Tensorflow.Common.Types;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+    /// <summary>
+    /// Rectified Linear Unit activation capped at 6: min(max(x, 0), 6).
+    /// </summary>
+    public class ReLu6 : Layer
+    {
+        public ReLu6() : base(new LayerArgs { })
+        {
+        }
+
+        protected override Tensors Call(Tensors inputs, Tensors state = null, bool? training = null, IOptionalArgs? optional_args = null)
+        {
+            return tf.nn.relu6(inputs);
+        }
+    }
+}

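The new layer can be exercised on its own before wiring it into a model. A small sketch, not part of the diff — `Apply` is inherited from the `Layer` base class, and eager mode is assumed:

    // Sketch: apply the layer directly to a tensor.
    var layer = new Tensorflow.Keras.Layers.ReLu6();
    var output = layer.Apply(tf.constant(new float[] { -1f, 3f, 10f }));
    // output values: [0, 3, 6]
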
@@ -735,6 +735,15 @@ namespace Tensorflow.Keras.Layers
         });

+    /// <summary>
+    /// Rectified Linear Unit activation capped at 6.
+    /// </summary>
+    /// <returns></returns>
+    public ILayer ReLU6()
+        => new ReLu6();
+
     public IRnnCell SimpleRNNCell(
         int units,
         string activation = "tanh",
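
Through the layers API, the cap-at-6 activation now composes like any other layer. A hedged end-to-end sketch — the Sequential/add calls and the layers other than ReLU6 are assumed from the existing keras surface, not added by this diff:

    // Sketch: a MobileNet-style conv block using the new ReLU6 layer.
    var model = keras.Sequential();
    model.add(keras.layers.Conv2D(32, (3, 3), padding: "same"));
    model.add(keras.layers.BatchNormalization());
    model.add(keras.layers.ReLU6());   // clamps activations to [0, 6]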