
ActivationTest.cs

using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Collections.Generic;
using static Tensorflow.Binding;
using Tensorflow.NumPy;
using static Tensorflow.KerasApi;
using Tensorflow;

namespace TensorFlowNET.Keras.UnitTest
{
    [TestClass]
    public class ActivationTest : EagerModeTestBase
    {
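        // LeakyReLU leaves positive inputs unchanged and multiplies negative inputs
        // by a small slope (alpha); the expected values below correspond to
        // alpha = 0.3, presumably this layer's default.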
        [TestMethod]
        public void LeakyReLU()
        {
            var layer = keras.layers.LeakyReLU();
            Tensor output = layer.Apply(np.array(-3.0f, -1.0f, 0.0f, 2.0f));
            Equal(new[] { -0.9f, -0.3f, 0.0f, 2.0f }, output.ToArray<float>());
        }
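
        // ELU leaves positive inputs unchanged and maps negative inputs to
        // alpha * (exp(x) - 1); the expected values below match alpha = 0.1,
        // which appears to be this layer's default.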
        [TestMethod]
        public void ELU()
        {
            Tensors input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.ELU().Apply(input);
            NDArray expected = new NDArray(new float[] { -0.0950213f, -0.08646648f, -0.06321206f, 0f, 1f, 2f });
            Assert.AreEqual(expected.numpy(), output.numpy());
        }
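
        // SELU scales ELU by fixed constants (scale ~ 1.0507, alpha ~ 1.6733):
        // positive inputs are multiplied by the scale, and negative inputs follow
        // scale * alpha * (exp(x) - 1).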
        [TestMethod]
        public void SELU()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.SELU().Apply(input);
            NDArray expected = new NDArray(new float[] { -1.6705688f, -1.5201665f, -1.1113307f, 0f, 1.050701f, 2.101402f });
            Assert.AreEqual(expected.numpy(), output.numpy());
        }
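
        // Softmax exponentiates each element and normalises along the given axis,
        // so the expected outputs below are positive and sum to 1.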
        [TestMethod]
        public void Softmax()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.Softmax(new Axis(-1)).Apply(input);
            var expected = new float[] { 0.0042697787f, 0.011606461f, 0.031549633f, 0.085760795f, 0.23312202f, 0.6336913f };
            Assert.IsTrue(Equal(expected, output.ToArray<float>()));
        }
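
        // Softplus computes ln(1 + exp(x)); at x = 0 this gives ln(2) ~ 0.6931.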
        [TestMethod]
        public void Softplus()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.Softplus().Apply(input);
            NDArray expected = new NDArray(new float[] { 0.04858733f, 0.12692805f, 0.31326166f, 0.6931472f, 1.3132616f, 2.126928f });
            Assert.AreEqual(expected, output.numpy());
        }
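
        // Softsign computes x / (1 + |x|), e.g. -3 / (1 + 3) = -0.75.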
        [TestMethod]
        public void Softsign()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.Softsign().Apply(input);
            NDArray expected = new NDArray(new float[] { -0.75f, -0.66666667f, -0.5f, 0f, 0.5f, 0.66666667f });
            Assert.AreEqual(expected, output.numpy());
        }
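
        // Exponential simply applies exp(x) element-wise.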
        [TestMethod]
        public void Exponential()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.Exponential().Apply(input);
            var expected = new float[] { 0.049787067f, 0.13533528f, 0.36787945f, 1f, 2.7182817f, 7.389056f };
            Assert.IsTrue(Equal(expected, output.ToArray<float>()));
        }
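
        // HardSigmoid is a piecewise-linear approximation of the sigmoid:
        // 0 for x < -2.5, 1 for x > 2.5, and 0.2 * x + 0.5 in between.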
        [TestMethod]
        public void HardSigmoid()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.HardSigmoid().Apply(input);
            // Mathematically the result is [0, 0.1, 0.3, 0.5, 0.7, 0.9], but the second
            // element comes out as 0.099999994, most likely because 0.2f * (-2f) + 0.5f
            // rounds to that value in single-precision arithmetic.
            NDArray expected = new NDArray(new float[] { 0f, 0.099999994f, 0.3f, 0.5f, 0.7f, 0.9f });
            Assert.AreEqual(expected, output.numpy());
        }
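
        // Swish computes x * sigmoid(x), e.g. 1 * sigmoid(1) ~ 0.7311.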
        [TestMethod]
        public void Swish()
        {
            Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
            Tensor output = keras.layers.Swish().Apply(input);
            NDArray expected = new NDArray(new float[] { -0.14227762f, -0.23840584f, -0.26894143f, 0f, 0.7310586f, 1.761594f });
            Assert.AreEqual(expected, output.numpy());
        }
    }
}
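
Several of the tests call Equal(...) from the EagerModeTestBase base class, whose implementation is not shown in this file. A minimal sketch of such an element-wise comparison, assuming a simple absolute tolerance (the class name, signature, and tolerance value here are illustrative, not the actual base-class API), could look like this:

using System;

public static class FloatAssertSketch
{
    // Compare two float arrays element-wise within an absolute tolerance.
    public static bool Equal(float[] expected, float[] actual, float tolerance = 1e-5f)
    {
        if (expected.Length != actual.Length)
            return false;
        for (int i = 0; i < expected.Length; i++)
        {
            if (Math.Abs(expected[i] - actual[i]) > tolerance)
                return false;
        }
        return true;
    }
}

A tolerance-based check like this avoids baking single-precision rounding artifacts, such as the 0.099999994 noted in the HardSigmoid test, into the expected values.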