Added activations

tags/v0.60-tf.numpy
Kevin Haiping, 4 years ago
commit 033fb7e3bb
12 changed files with 275 additions and 19 deletions:

 1. +9  -0   src/TensorFlowNET.Core/Keras/ArgsDefinition/Activation/SoftmaxArgs.cs
 2. +2  -4   src/TensorFlowNET.Keras/Layers/Activation/ELU.cs
 3. +24 -0   src/TensorFlowNET.Keras/Layers/Activation/Exponential.cs
 4. +22 -0   src/TensorFlowNET.Keras/Layers/Activation/HardSigmoid.cs
 5. +3  -1   src/TensorFlowNET.Keras/Layers/Activation/SELU.cs
 6. +24 -0   src/TensorFlowNET.Keras/Layers/Activation/Softmax.cs
 7. +22 -0   src/TensorFlowNET.Keras/Layers/Activation/Softplus.cs
 8. +22 -0   src/TensorFlowNET.Keras/Layers/Activation/Softsign.cs
 9. +23 -0   src/TensorFlowNET.Keras/Layers/Activation/Swish.cs
10. +22 -0   src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs
11. +22 -0   src/TensorFlowNET.Keras/Layers/LayersApi.Activation.cs
12. +80 -14  test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs

+9 -0  src/TensorFlowNET.Core/Keras/ArgsDefinition/Activation/SoftmaxArgs.cs

@@ -0,0 +1,9 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.ArgsDefinition {
    public class SoftmaxArgs : LayerArgs {
        public Axis axis { get; set; } = -1;
    }
}

+2 -4  src/TensorFlowNET.Keras/Layers/Activation/ELU.cs

@@ -24,12 +24,10 @@ namespace Tensorflow.Keras.Layers {
         }
         protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
             Tensor output = inputs;
-            if ( alpha != 1f ) {
-                output = tf.where(output > 0f, output, alpha * (tf.exp(output) - 1f));
-            }
+            output = tf.where(output > 0f, output,
+                tf.multiply(alpha, tf.sub(tf.exp(output), 1f)));
             return output;
         }

         public override Shape ComputeOutputShape ( Shape input_shape ) {
             return input_shape;
         }
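With the guard removed, ELU now always evaluates tf.where: x for x > 0, and alpha * (exp(x) - 1) otherwise. A minimal eager-mode usage sketch against the API this commit adds (input values and alpha chosen for illustration only):

    using Tensorflow;
    using static Tensorflow.Binding;
    using static Tensorflow.KerasApi;

    var x = tf.constant(new float[] { -1f, 0f, 2f });
    var y = keras.layers.ELU(alpha: 1f).Apply(x);
    // y ≈ [-0.6321206, 0, 2]: alpha * (exp(-1) - 1) for the negative entry,
    // identity for the non-negative ones.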


+24 -0  src/TensorFlowNET.Keras/Layers/Activation/Exponential.cs

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Exponential : Layer {
        public Exponential ( LayerArgs args ) : base(args) {
            // Exponential has no args
        }
        protected override void build ( Tensors inputs ) {
            built = true;
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor output = inputs;
            return tf.exp(output);
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}

+22 -0  src/TensorFlowNET.Keras/Layers/Activation/HardSigmoid.cs

@@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class HardSigmoid : Layer {
        public HardSigmoid ( LayerArgs args ) : base(args) {
            // hard sigmoid has no arguments
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;
            return tf.clip_by_value(
                tf.add(tf.multiply(x, 0.2f), 0.5f), 0f, 1f);
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}
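As a worked check of the clip(0.2 * x + 0.5, 0, 1) formula implemented above: the test inputs {-3, -2, -1, 0, 1, 2} give 0.2 * x + 0.5 = {-0.1, 0.1, 0.3, 0.5, 0.7, 0.9}, and clipping to [0, 1] turns -0.1 into 0, yielding exactly the expected values asserted in the unit test at the bottom of this commit.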

+3 -1  src/TensorFlowNET.Keras/Layers/Activation/SELU.cs

@@ -23,7 +23,9 @@ namespace Tensorflow.Keras.Layers {
         }
         protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
             Tensor output = inputs;
-            return tf.where(output > 0f, scale * output, scale * alpha * (tf.exp(output) - 1f));
+            return tf.where(output > 0f,
+                tf.multiply(scale, output),
+                tf.multiply(scale, tf.multiply(alpha, tf.sub(tf.exp(output), 1f))));
         }
         public override Shape ComputeOutputShape ( Shape input_shape ) {
             return input_shape;
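The scale and alpha fields used here are presumably the standard self-normalizing constants from Klambauer et al. (scale ≈ 1.0507010, alpha ≈ 1.6732632); the unit test below is consistent with that, since SELU(1) evaluates to 1.050701 (scale * 1) and SELU(2) to 2.101402.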


+24 -0  src/TensorFlowNET.Keras/Layers/Activation/Softmax.cs

@@ -0,0 +1,24 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Softmax : Layer {
        Axis axis;
        public Softmax ( SoftmaxArgs args ) : base(args) {
            axis = args.axis;
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;
            Tensor e = tf.exp(tf.sub(x, tf.reduce_max(x, axis: this.axis, keepdims: true)));
            Tensor s = tf.reduce_sum(e, axis: this.axis, keepdims: true);
            return tf.div(e, s);
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}
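This is the numerically stable softmax formulation: subtracting reduce_max(x) before exponentiating does not change the result, because exp(x_i - m) / sum_j exp(x_j - m) = exp(x_i) / sum_j exp(x_j) (the common exp(-m) factor cancels), but it caps the largest exponent at exp(0) = 1, so large logits cannot overflow float32.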

+22 -0  src/TensorFlowNET.Keras/Layers/Activation/Softplus.cs

@@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Softplus : Layer {
        public Softplus ( LayerArgs args ) : base(args) {
            // Softplus has no arguments
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;
            return tf.log(
                tf.add(tf.exp(x), 1f));
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}
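One caveat: the naive log(exp(x) + 1) form overflows float32 once x exceeds roughly 88, where exp(x) passes the float32 maximum. An algebraically equivalent stable form is softplus(x) = max(x, 0) + log(1 + exp(-|x|)). A hedged sketch of that alternative body, using only ops this commit already calls elsewhere (this is not what the commit ships):

    // max(x, 0) via tf.where, plus log(1 + exp(-|x|)); identical to log(exp(x) + 1)
    // but the exponent argument is always <= 0, so tf.exp cannot overflow.
    return tf.add(
        tf.where(x > 0f, x, tf.multiply(x, 0f)),
        tf.log(tf.add(tf.exp(tf.negative(tf.abs(x))), 1f)));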

+22 -0  src/TensorFlowNET.Keras/Layers/Activation/Softsign.cs

@@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Softsign : Layer {
        public Softsign ( LayerArgs args ) : base(args) {
            // Softsign has no arguments
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;
            // x / (abs(x) + 1)
            return tf.div(x, tf.add(1f, tf.abs(x)));
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}
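Worked check of x / (1 + |x|): softsign(-3) = -3 / 4 = -0.75 and softsign(2) = 2 / 3 ≈ 0.6666667, matching the first and last expected values in the unit test below.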

+23 -0  src/TensorFlowNET.Keras/Layers/Activation/Swish.cs

@@ -0,0 +1,23 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Swish : Layer {
        public Swish ( LayerArgs args ) : base(args) {
            // Swish has no arguments
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;

            // x / (1 + exp(-x))
            return tf.div(x, (tf.add(1f, tf.exp(tf.negative(x)))));
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}
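The x / (1 + exp(-x)) expression is the expanded form of swish(x) = x * sigmoid(x), since sigmoid(x) = 1 / (1 + exp(-x)). If the binding exposes a sigmoid op, an equivalent body would be tf.multiply(x, tf.sigmoid(x)); that call is an assumption, not something this commit exercises.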

+22 -0  src/TensorFlowNET.Keras/Layers/Activation/Tanh.cs

@@ -0,0 +1,22 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers {
    public class Tanh : Layer {
        public Tanh ( LayerArgs args ) : base(args) {
            // Tanh has no arguments
        }
        protected override Tensors Call ( Tensors inputs, Tensor state = null, bool? training = null ) {
            Tensor x = inputs;

            return tf.tanh(x);
        }
        public override Shape ComputeOutputShape ( Shape input_shape ) {
            return input_shape;
        }
    }
}

+22 -0  src/TensorFlowNET.Keras/Layers/LayersApi.Activation.cs

@@ -0,0 +1,22 @@
using Tensorflow.NumPy;
using System.Collections.Generic;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;
using static Tensorflow.KerasApi;

namespace Tensorflow.Keras.Layers {
    public partial class LayersApi {
        public ELU ELU ( float alpha = 0.1f )
            => new ELU(new ELUArgs { Alpha = alpha });
        public SELU SELU ()
            => new SELU(new LayerArgs { });
        public Softmax Softmax ( Axis axis ) => new Softmax(new SoftmaxArgs { axis = axis });
        public Softplus Softplus () => new Softplus(new LayerArgs { });
        public HardSigmoid HardSigmoid () => new HardSigmoid(new LayerArgs { });
        public Softsign Softsign () => new Softsign(new LayerArgs { });
        public Swish Swish () => new Swish(new LayerArgs { });
        public Tanh Tanh () => new Tanh(new LayerArgs { });
        public Exponential Exponential () => new Exponential(new LayerArgs { });
    }
}
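These registrations are what make the new layers reachable as keras.layers.* in user code, which is also how the unit tests below consume them. A minimal eager-mode sketch (input values illustrative):

    using Tensorflow;
    using static Tensorflow.Binding;
    using static Tensorflow.KerasApi;

    var logits = tf.constant(new float[] { -1f, 0f, 2f });
    var probs = keras.layers.Softmax(new Axis(-1)).Apply(logits);  // normalized over the last axis
    var squashed = keras.layers.Tanh().Apply(logits);              // elementwise tanh
    var smooth = keras.layers.Softplus().Apply(logits);            // log(exp(x) + 1)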

+80 -14  test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs

@@ -1,22 +1,88 @@
 using Microsoft.VisualStudio.TestTools.UnitTesting;
 using System;
 using System.Collections.Generic;
 using System.Text;
 using static Tensorflow.Binding;
 using Tensorflow.NumPy;
 using static Tensorflow.KerasApi;
 using Tensorflow;

-namespace TensorFlowNET.Keras.UnitTest
-{
-    [TestClass]
-    public class ActivationTest : EagerModeTestBase
-    {
-        [TestMethod]
-        public void LeakyReLU()
-        {
-            var layer = keras.layers.LeakyReLU();
-            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
-            Equal(new[] { -0.9f, -0.3f, 0.0f, 2.0f }, output.ToArray<float>());
-        }
-    }
-}
+namespace TensorFlowNET.Keras.UnitTest {
+    [TestClass]
+    public class ActivationTest : EagerModeTestBase {
+        [TestMethod]
+        public void LeakyReLU () {
+            var layer = keras.layers.LeakyReLU();
+            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+            Equal(new[] { -0.9f, -0.3f, 0.0f, 2.0f }, output.ToArray<float>());
+        }
+
+        [TestMethod]
+        public void ELU () {
+            Tensors input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.ELU().Apply(input);
+            NDArray expected = new NDArray(new float[] { -0.0950213f, -0.08646648f, -0.06321206f, 0f, 1f, 2f });
+            Assert.AreEqual(expected.numpy(), output.numpy());
+        }
+
+        [TestMethod]
+        public void SELU () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.SELU().Apply(input);
+            NDArray expected = new NDArray(new float[] { -1.6705688f, -1.5201665f, -1.1113307f, 0f, 1.050701f, 2.101402f });
+            Assert.AreEqual(expected.numpy(), output.numpy());
+        }
+
+        [TestMethod]
+        public void Softmax () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.Softmax(new Axis(-1)).Apply(input);
+            NDArray expected = new NDArray(new float[] { 0.0042697787f, 0.011606461f, 0.031549633f, 0.085760795f, 0.23312202f, 0.6336913f });
+            Assert.AreEqual(expected.numpy(), output.numpy());
+        }
+
+        [TestMethod]
+        public void Softplus () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.Softplus().Apply(input);
+            NDArray expected = new NDArray(new float[] { 0.04858733f, 0.12692805f, 0.31326166f, 0.6931472f, 1.3132616f, 2.126928f });
+            Assert.AreEqual(expected, output.numpy());
+        }
+
+        [TestMethod]
+        public void Softsign () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.Softsign().Apply(input);
+            NDArray expected = new NDArray(new float[] { -0.75f, -0.66666667f, -0.5f, 0f, 0.5f, 0.66666667f });
+            Assert.AreEqual(expected, output.numpy());
+        }
+
+
+        [TestMethod]
+        public void Exponential () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.Exponential().Apply(input);
+            NDArray expected = new NDArray(new float[] { 0.049787067f, 0.13533528f, 0.36787945f, 1f, 2.7182817f, 7.389056f });
+            Assert.AreEqual(expected, output.numpy());
+        }
+
+        [TestMethod]
+        public void HardSigmoid () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.HardSigmoid().Apply(input);
+            // Mathematically this is [0, 0.1, 0.3, 0.5, 0.7, 0.9], but the second
+            // element comes back as 0.099999994 because 0.2f * x + 0.5f rounds
+            // in float32 arithmetic.
+            NDArray expected = new NDArray(new float[] { 0f, 0.099999994f, 0.3f, 0.5f, 0.7f, 0.9f });
+            Assert.AreEqual(expected, output.numpy());
+        }
+
+
+        [TestMethod]
+        public void Swish () {
+            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
+            Tensor output = keras.layers.Swish().Apply(input);
+            NDArray expected = new NDArray(new float[] { -0.14227762f, -0.23840584f, -0.26894143f, 0f, 0.7310586f, 1.761594f });
+            Assert.AreEqual(expected, output.numpy());
+        }
+    }
+}
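A note on these assertions: they compare float32 outputs for exact equality, which is why the HardSigmoid test has to hard-code 0.099999994 instead of 0.1. A tolerance-based comparison would make the suite less brittle; a hedged sketch of such a helper (AssertAllClose is hypothetical, not part of this commit or of EagerModeTestBase):

    // Hypothetical helper: element-wise comparison within an absolute tolerance.
    static void AssertAllClose(float[] expected, float[] actual, float atol = 1e-6f) {
        Assert.AreEqual(expected.Length, actual.Length);
        for (int i = 0; i < expected.Length; i++)
            Assert.IsTrue(Math.Abs(expected[i] - actual[i]) <= atol,
                $"index {i}: expected {expected[i]}, actual {actual[i]}");
    }

    // Usage: AssertAllClose(new[] { 0f, 0.1f, 0.3f, 0.5f, 0.7f, 0.9f }, output.ToArray<float>());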
