
Add activations.mish.

tags/v0.100.4-load-saved-model
Haiping Chen · 2 years ago
parent commit d794576486
5 changed files with 37 additions and 1 deletion:
  1. src/TensorFlowNET.Core/APIs/tf.math.cs (+8, -0)
  2. src/TensorFlowNET.Core/Gradients/nn_grad.cs (+10, -0)
  3. src/TensorFlowNET.Core/Operations/nn_ops.cs (+3, -0)
  4. src/TensorFlowNET.Keras/Activations.cs (+5, -1)
  5. test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs (+11, -0)

src/TensorFlowNET.Core/APIs/tf.math.cs (+8, -0)

@@ -14,6 +14,8 @@
    limitations under the License.
 ******************************************************************************/
 
+using Tensorflow.Operations;
+
 namespace Tensorflow
 {
     public partial class tensorflow
@@ -50,6 +52,12 @@ namespace Tensorflow
         public Tensor sum(Tensor x, Axis? axis = null, string name = null)
             => math_ops.reduce_sum(x, axis: axis, name: name);
 
+        public Tensor softplus(Tensor features, string name = null)
+            => nn_ops.softplus(features, name: name);
+
+        public Tensor tanh(Tensor x, string name = null)
+            => math_ops.tanh(x, name: name);
+
         /// <summary>
         /// Finds values and indices of the `k` largest entries for the last dimension.
         /// </summary>
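With softplus now exposed beside tanh, the mish transform used in Activations.cs below can be composed entirely from these public wrappers. A minimal sketch (the input values are illustrative, not part of the commit):

    // mish(x) = x * tanh(softplus(x)), built from the members added above
    var x = tf.constant(new[] { -1.0f, 0.0f, 1.0f });
    var y = x * tf.math.tanh(tf.math.softplus(x));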


src/TensorFlowNET.Core/Gradients/nn_grad.cs (+10, -0)

@@ -120,6 +120,16 @@ namespace Tensorflow.Gradients
             };
         }
 
+        [RegisterGradient("Softplus")]
+        public static Tensor[] _SoftplusGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            var softplus = grad * math_ops.sigmoid(x);
+            return new Tensor[] { softplus };
+        }
+
         [RegisterGradient("SquaredDifference")]
         public static Tensor[] _SquaredDifferenceGrad(Operation op, Tensor[] grads)
         {
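The registered gradient follows from softplus(x) = ln(1 + e^x): its derivative is e^x / (1 + e^x) = sigmoid(x), so the upstream gradient is simply scaled by sigmoid(x). A quick eager spot check, assuming the library's GradientTape API (illustrative, not part of the commit):

    // d/dx softplus(x) at x = 1 should equal sigmoid(1) ≈ 0.7310586f
    var x = tf.constant(1.0f);
    using var tape = tf.GradientTape();
    tape.watch(x);
    var y = tf.math.softplus(x);
    var dy_dx = tape.gradient(y, x);  // expect ≈ 0.7310586f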


src/TensorFlowNET.Core/Operations/nn_ops.cs (+3, -0)

@@ -132,6 +132,9 @@ namespace Tensorflow
             return _softmax(logits, gen_nn_ops.softmax, axis, name);
         }
 
+        public static Tensor softplus(Tensor features, string name = null)
+            => tf.Context.ExecuteOp("Softplus", name, new ExecuteOpArgs(features));
+
         public static Tensor l2_loss(Tensor t, string name = null)
             => tf.Context.ExecuteOp("L2Loss", name, new ExecuteOpArgs(t));
 
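Like l2_loss below it, nn_ops.softplus dispatches the native "Softplus" kernel through tf.Context.ExecuteOp; the tf.math wrapper added above delegates to this method. A quick numeric spot check, with expected values from softplus(x) = ln(1 + e^x) (illustrative):

    // softplus(0) = ln 2 ≈ 0.6931f; softplus(2) = ln(1 + e^2) ≈ 2.1269f
    var y = nn_ops.softplus(tf.constant(new[] { 0.0f, 2.0f }));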



src/TensorFlowNET.Keras/Activations.cs (+5, -1)

@@ -20,12 +20,14 @@ namespace Tensorflow.Keras
             => tf.Context.ExecuteOp("Softmax", name, new ExecuteOpArgs(features));
         private static Activation _tanh = (features, name)
             => tf.Context.ExecuteOp("Tanh", name, new ExecuteOpArgs(features));
+        private static Activation _mish = (features, name)
+            => features * tf.math.tanh(tf.math.softplus(features));
 
         /// <summary>
         /// Register the name-activation mapping in this static class.
         /// </summary>
         /// <param name="name"></param>
-        /// <param name="Activation"></param>
+        /// <param name="activation"></param>
         private static void RegisterActivation(string name, Activation activation)
         {
             _nameActivationMap[name] = activation;
@@ -42,6 +44,7 @@ namespace Tensorflow.Keras
             RegisterActivation("sigmoid", _sigmoid);
             RegisterActivation("softmax", _softmax);
             RegisterActivation("tanh", _tanh);
+            RegisterActivation("mish", _mish);
         }
 
         public Activation Linear => _linear;
@@ -54,6 +57,7 @@ namespace Tensorflow.Keras
 
         public Activation Tanh => _tanh;
 
+        public Activation Mish => _mish;
 
         public static Activation GetActivationByName(string name)
         {
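Because "mish" is now in the name map, it resolves through the same lookup as the existing activations, e.g. when a layer is configured with the activation string "mish". A hedged sketch, assuming the containing static class is named Activations as the file name suggests:

    // resolve by registered name; equivalent to the Mish property above
    var mish = Activations.GetActivationByName("mish");
    var y = mish(tf.constant(new[] { 1.0f, 0.0f }));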


test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs (+11, -0)

@@ -94,5 +94,16 @@ namespace TensorFlowNET.Keras.UnitTest {
             NDArray expected = new NDArray(new float[] { -0.14227762f, -0.23840584f, -0.26894143f, 0f, 0.7310586f, 1.761594f });
             Assert.AreEqual(expected, output.numpy());
         }
+
+        /// <summary>
+        /// https://www.tensorflow.org/addons/api_docs/python/tfa/activations/mish
+        /// </summary>
+        [TestMethod]
+        public void Mish()
+        {
+            var x = tf.constant(new[] { 1.0, 0.0, 1.0 }, dtype: tf.float32);
+            var output = keras.activations.Mish(x);
+            Assert.AreEqual(new[] { 0.86509836f, 0f, 0.86509836f }, output.numpy());
+        }
     }
 }
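The asserted constants check out by hand: softplus(1) = ln(1 + e) ≈ 1.3132617 and tanh(1.3132617) ≈ 0.8650984, so mish(1) ≈ 0.86509836, while mish(0) = 0 · tanh(ln 2) = 0. The same arithmetic in plain C#, using only System.Math (illustrative):

    using System;
    // reference mish in double precision: x * tanh(ln(1 + e^x))
    double Mish(double v) => v * Math.Tanh(Math.Log(1.0 + Math.Exp(v)));
    Console.WriteLine(Mish(1.0));  // ≈ 0.8650984 (0.86509836f in float32)
    Console.WriteLine(Mish(0.0));  // 0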
