
add relu6

tags/v0.150.0-BERT-Model
dogvane committed 1 year ago
commit ec4f372a29
6 changed files with 50 additions and 0 deletions

1. src/TensorFlowNET.Core/APIs/tf.nn.cs (+5, -0)
2. src/TensorFlowNET.Core/Keras/Activations/Activations.cs (+1, -0)
3. src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+3, -0)
4. src/TensorFlowNET.Keras/Activations.cs (+7, -0)
5. src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs (+25, -0)
6. src/TensorFlowNET.Keras/Layers/LayersApi.cs (+9, -0)

src/TensorFlowNET.Core/APIs/tf.nn.cs (+5, -0)

@@ -101,6 +101,8 @@ namespace Tensorflow
name: name);

public IActivation relu() => new relu();


public IActivation swish() => new swish();
public IActivation tanh() => new tanh();

@@ -111,6 +113,9 @@ namespace Tensorflow
public Tensor relu(Tensor features, string name = null)
=> gen_nn_ops.relu(features, name);

public Tensor relu6(Tensor features, string name = null)
=> gen_nn_ops.relu6(features, name);

public Tensor[] fused_batch_norm(Tensor x,
Tensor scale,
Tensor offset,
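
A minimal sketch of calling the new low-level op (assumes the usual "using static Tensorflow.Binding;" import; the input values are illustrative):

    using static Tensorflow.Binding;

    // relu6 clamps element-wise to [0, 6]: min(max(x, 0), 6)
    var x = tf.constant(new float[] { -3f, 2f, 8f });
    var y = tf.nn.relu6(x);   // -> [0, 2, 6]
    print(y);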


src/TensorFlowNET.Core/Keras/Activations/Activations.cs (+1, -0)

@@ -32,6 +32,7 @@ namespace Tensorflow.Keras
Activation Linear { get; }

Activation Relu { get; }
Activation Relu6 { get; }

Activation Sigmoid { get; }
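
The new Relu6 property can then be consumed wherever an Activation is accepted. A hedged sketch, assuming this interface is reachable via keras.activations and that the ActivationFunction delegate is invokable as defined in the Keras implementation further down:

    using static Tensorflow.Binding;
    using static Tensorflow.KerasApi;

    var x = tf.constant(new float[] { 1f, 9f });
    var act = keras.activations.Relu6;
    var y = act.ActivationFunction(x, null);   // runs the Relu6 op -> [1, 6]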



src/TensorFlowNET.Core/Keras/Layers/ILayersApi.cs (+3, -0)

@@ -180,6 +180,9 @@ namespace Tensorflow.Keras.Layers
public ILayer Normalization(Shape? input_shape = null, int? axis = -1, float? mean = null, float? variance = null, bool invert = false);
public ILayer LeakyReLU(float alpha = 0.3f);

public ILayer ReLU6();


public IRnnCell LSTMCell(int uints,
string activation = "tanh",
string recurrent_activation = "sigmoid",


src/TensorFlowNET.Keras/Activations.cs (+7, -0)

@@ -20,6 +20,11 @@ namespace Tensorflow.Keras
Name = "relu",
ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu", name, new ExecuteOpArgs(features))
};
private static Activation _relu6 = new Activation()
{
Name = "relu6",
ActivationFunction = (features, name) => tf.Context.ExecuteOp("Relu6", name, new ExecuteOpArgs(features))
};
private static Activation _sigmoid = new Activation()
{
Name = "sigmoid",
@@ -55,6 +60,7 @@ namespace Tensorflow.Keras
_nameActivationMap = new Dictionary<string, Activation>();

RegisterActivation(_relu);
RegisterActivation(_relu6);
RegisterActivation(_linear);
RegisterActivation(_sigmoid);
RegisterActivation(_softmax);
@@ -65,6 +71,7 @@ namespace Tensorflow.Keras
public Activation Linear => _linear;

public Activation Relu => _relu;
public Activation Relu6 => _relu6;

public Activation Sigmoid => _sigmoid;
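
Registering _relu6 in _nameActivationMap is what makes the string name resolvable at layer-construction time; a small sketch, assuming a GetActivationFromName-style lookup is exposed on this API (the lookup call is an assumption, shown for illustration):

    // "relu6" now resolves through the name map populated above
    var act = keras.activations.GetActivationFromName("relu6");  // assumed lookup helper
    Console.WriteLine(act.Name);   // "relu6"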



src/TensorFlowNET.Keras/Layers/Activation/ReLu6.cs (+25, -0)

@@ -0,0 +1,25 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using Tensorflow.Common.Types;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
/// <summary>
/// Rectified Linear Unit activation capped at 6: returns min(max(x, 0), 6).
/// </summary>
public class ReLu6 : Layer
{
public ReLu6() : base(new LayerArgs { })
{
}

protected override Tensors Call(Tensors inputs, Tensors state = null, bool? training = null, IOptionalArgs? optional_args = null)
{
return tf.nn.relu6(inputs);
}
}
}
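
Because the layer takes no constructor arguments, it can be exercised directly; a quick sketch, assuming the standard Layer.Apply entry point (values illustrative):

    using Tensorflow.Keras.Layers;
    using static Tensorflow.Binding;

    var layer = new ReLu6();
    var x = tf.constant(new float[] { -1f, 3f, 10f });
    var y = layer.Apply(x);   // -> [0, 3, 6]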

src/TensorFlowNET.Keras/Layers/LayersApi.cs (+9, -0)

@@ -735,6 +735,15 @@ namespace Tensorflow.Keras.Layers
});


/// <summary>
/// Rectified Linear Unit activation capped at a maximum value of 6.
/// </summary>
/// <returns></returns>
public ILayer ReLU6()
=> new ReLu6();


public IRnnCell SimpleRNNCell(
int units,
string activation = "tanh",
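
End to end, the new pieces compose like any other activation; a hedged sketch, assuming the usual Sequential and string-activation overloads in this API:

    using System.Collections.Generic;
    using Tensorflow.Keras;
    using static Tensorflow.KerasApi;

    var model = keras.Sequential(new List<ILayer>
    {
        keras.layers.Dense(32, activation: "relu6"),   // resolved via the registered name
        keras.layers.ReLU6(),                          // or added as an explicit layer
    });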

