
Add LeakyReLU layer.

tags/v0.30
Oceania2018, 4 years ago
parent commit 3e76c19879
4 changed files with 78 additions and 0 deletions
  1. +14 -0   src/TensorFlowNET.Core/Keras/ArgsDefinition/LeakyReLUArgs.cs
  2. +11 -0   src/TensorFlowNET.Keras/Layers/LayersApi.cs
  3. +27 -0   src/TensorFlowNET.Keras/Layers/LeakyReLU.cs
  4. +26 -0   test/TensorFlowNET.UnitTest/Keras/ActivationTest.cs

+14 -0   src/TensorFlowNET.Core/Keras/ArgsDefinition/LeakyReLUArgs.cs

@@ -0,0 +1,14 @@
using System;
using System.Collections.Generic;
using System.Text;

namespace Tensorflow.Keras.ArgsDefinition
{
    public class LeakyReLUArgs : LayerArgs
    {
        /// <summary>
        /// Negative slope coefficient.
        /// </summary>
        public float Alpha { get; set; } = 0.3f;
    }
}
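
The args object is the layer's only configuration surface. A minimal sketch of constructing the layer directly from it, mirroring what the keras.layers.LeakyReLU factory added to LayersApi.cs below does:

    // Sketch only: build the layer straight from its args object.
    // Equivalent to keras.layers.LeakyReLU(alpha: 0.2f) from LayersApi.cs below.
    var args = new Tensorflow.Keras.ArgsDefinition.LeakyReLUArgs { Alpha = 0.2f };
    var layer = new Tensorflow.Keras.Layers.LeakyReLU(args);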

+11 -0   src/TensorFlowNET.Keras/Layers/LayersApi.cs

@@ -315,6 +315,17 @@ namespace Tensorflow.Keras.Layers
            return layer.Apply(inputs);
        }

        /// <summary>
        /// Leaky version of a Rectified Linear Unit.
        /// </summary>
        /// <param name="alpha">Negative slope coefficient.</param>
        /// <returns>The configured <see cref="LeakyReLU"/> layer.</returns>
        public Layer LeakyReLU(float alpha = 0.3f)
            => new LeakyReLU(new LeakyReLUArgs
            {
                Alpha = alpha
            });

        public Layer LSTM(int units,
            Activation activation = null,
            Activation recurrent_activation = null,
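
A usage sketch for the new factory, applying the layer eagerly to a constant tensor. It assumes the same usings as the unit test added below (static Tensorflow.KerasApi and NumSharp) and uses only calls that appear in this commit:

    // Usage sketch; mirrors the unit test in this commit.
    var layer = keras.layers.LeakyReLU(alpha: 0.1f);
    Tensor y = layer.Apply(np.array(-2.0f, 4.0f));
    // With alpha = 0.1 the negative input is scaled: y == [-0.2, 4.0].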


+27 -0   src/TensorFlowNET.Keras/Layers/LeakyReLU.cs

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Text;
using Tensorflow.Keras.ArgsDefinition;
using Tensorflow.Keras.Engine;
using static Tensorflow.Binding;

namespace Tensorflow.Keras.Layers
{
    /// <summary>
    /// Leaky version of a Rectified Linear Unit.
    /// </summary>
    public class LeakyReLU : Layer
    {
        LeakyReLUArgs args;
        float alpha => args.Alpha;
        public LeakyReLU(LeakyReLUArgs args) : base(args)
        {
            this.args = args;
        }

        protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
        {
            return tf.nn.leaky_relu(inputs, alpha: alpha);
        }
    }
}
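
Call simply forwards to tf.nn.leaky_relu, which computes f(x) = x for x >= 0 and f(x) = alpha * x otherwise. An element-wise reference of that formula in plain C#, for illustration only:

    // Illustrative scalar reference of the leaky ReLU formula; the layer
    // itself delegates the real work to the tf.nn.leaky_relu kernel.
    static float LeakyRelu(float x, float alpha = 0.3f)
        => x >= 0f ? x : alpha * x;   // equals Math.Max(x, alpha * x) for 0 <= alpha <= 1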

+26 -0   test/TensorFlowNET.UnitTest/Keras/ActivationTest.cs

@@ -0,0 +1,26 @@
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using System.Text;
using NumSharp;
using static Tensorflow.KerasApi;
using Tensorflow;

namespace TensorFlowNET.UnitTest.Keras
{
    [TestClass]
    public class ActivationTest : EagerModeTestBase
    {
        [TestMethod]
        public void LeakyReLU()
        {
            var layer = keras.layers.LeakyReLU();
            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
            var outputArray = output.ToArray<float>();
            assertFloat32Equal(-0.9f, outputArray[0], "Not equal");
            assertFloat32Equal(-0.3f, outputArray[1], "Not equal");
            assertFloat32Equal(0.0f, outputArray[2], "Not equal");
            assertFloat32Equal(2.0f, outputArray[3], "Not equal");
        }
    }
}
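
The expected values follow from the default slope alpha = 0.3: -3.0 * 0.3 = -0.9 and -1.0 * 0.3 = -0.3, while non-negative inputs pass through unchanged. A hypothetical companion case (not part of this commit) with a non-default slope would look the same:

    // Hypothetical extra case, not in the commit: alpha = 0.1 maps
    // [-3, -1, 0, 2] to [-0.3, -0.1, 0, 2].
    var layer = keras.layers.LeakyReLU(alpha: 0.1f);
    Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));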
