diff --git a/src/TensorFlowNET.Core/Keras/ArgsDefinition/LeakyReLUArgs.cs b/src/TensorFlowNET.Core/Keras/ArgsDefinition/LeakyReLUArgs.cs
new file mode 100644
index 00000000..c62d7a12
--- /dev/null
+++ b/src/TensorFlowNET.Core/Keras/ArgsDefinition/LeakyReLUArgs.cs
@@ -0,0 +1,14 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+
+namespace Tensorflow.Keras.ArgsDefinition
+{
+ public class LeakyReLUArgs : LayerArgs
+ {
+ /// <summary>
+ /// Negative slope coefficient. Default is 0.3f.
+ /// </summary>
+ public float Alpha { get; set; } = 0.3f;
+ }
+}
diff --git a/src/TensorFlowNET.Keras/Layers/LayersApi.cs b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
index ebdd09f6..4b770cda 100644
--- a/src/TensorFlowNET.Keras/Layers/LayersApi.cs
+++ b/src/TensorFlowNET.Keras/Layers/LayersApi.cs
@@ -315,6 +315,17 @@ namespace Tensorflow.Keras.Layers
return layer.Apply(inputs);
}
+ /// <summary>
+ /// Leaky version of a Rectified Linear Unit.
+ /// </summary>
+ /// <param name="alpha">Negative slope coefficient. Default is 0.3f.</param>
+ /// <returns>A <see cref="LeakyReLU"/> layer configured with the given alpha.</returns>
+ public Layer LeakyReLU(float alpha = 0.3f)
+ => new LeakyReLU(new LeakyReLUArgs
+ {
+ Alpha = alpha
+ });
+
public Layer LSTM(int units,
Activation activation = null,
Activation recurrent_activation = null,
diff --git a/src/TensorFlowNET.Keras/Layers/LeakyReLU.cs b/src/TensorFlowNET.Keras/Layers/LeakyReLU.cs
new file mode 100644
index 00000000..9693c466
--- /dev/null
+++ b/src/TensorFlowNET.Keras/Layers/LeakyReLU.cs
@@ -0,0 +1,27 @@
+using System;
+using System.Collections.Generic;
+using System.Text;
+using Tensorflow.Keras.ArgsDefinition;
+using Tensorflow.Keras.Engine;
+using static Tensorflow.Binding;
+
+namespace Tensorflow.Keras.Layers
+{
+ /// <summary>
+ /// Leaky version of a Rectified Linear Unit.
+ /// </summary>
+ public class LeakyReLU : Layer
+ {
+ LeakyReLUArgs args;
+ float alpha => args.Alpha;
+ public LeakyReLU(LeakyReLUArgs args) : base(args)
+ {
+ this.args = args;
+ }
+
+ protected override Tensors Call(Tensors inputs, Tensor state = null, bool is_training = false)
+ {
+ return tf.nn.leaky_relu(inputs, alpha: alpha);
+ }
+ }
+}
diff --git a/test/TensorFlowNET.UnitTest/Keras/ActivationTest.cs b/test/TensorFlowNET.UnitTest/Keras/ActivationTest.cs
new file mode 100644
index 00000000..d26a2ac7
--- /dev/null
+++ b/test/TensorFlowNET.UnitTest/Keras/ActivationTest.cs
@@ -0,0 +1,29 @@
+using Microsoft.VisualStudio.TestTools.UnitTesting;
+using System;
+using System.Collections.Generic;
+using System.Text;
+using NumSharp;
+using static Tensorflow.KerasApi;
+using Tensorflow;
+
+namespace TensorFlowNET.UnitTest.Keras
+{
+ [TestClass]
+ public class ActivationTest : EagerModeTestBase
+ {
+ /// <summary>
+ /// LeakyReLU with the default alpha (0.3f): negatives scale by alpha (-3 -> -0.9, -1 -> -0.3), non-negatives pass through.
+ /// </summary>
+ [TestMethod]
+ public void LeakyReLU()
+ {
+ var layer = keras.layers.LeakyReLU();
+ Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
+ var outputArray = output.ToArray();
+ assertFloat32Equal(-0.9f, outputArray[0], "Not equal");
+ assertFloat32Equal(-0.3f, outputArray[1], "Not equal");
+ assertFloat32Equal(0.0f, outputArray[2], "Not equal");
+ assertFloat32Equal(2.0f, outputArray[3], "Not equal");
+ }
+ }
+}