
test: add tolerance to float NDArray comparison.

pull/1047/head
AsakusaRinne 2 years ago
parent · commit f76857f6d3
3 changed files with 3 additions and 3 deletions
  1. test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs (+1, -1)
  2. test/TensorFlowNET.Keras.UnitTest/Layers/AttentionTest.cs (+1, -1)
  3. test/TensorFlowNET.Keras.UnitTest/Losses/LossesTest.cs (+1, -1)
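Each of the three one-line diffs below replaces an exact Assert.AreEqual on float NDArrays with Assert.IsTrue(expected == actual), presumably because the NDArray equality path applies a tolerance for float dtypes, as the commit message states, whereas exact equality can fail on rounding differences. As an illustration only, a tolerance-based comparison written in plain C#/MSTest, independent of Tensorflow.NET's NDArray operators, might look like the sketch below (the helper name, the default tolerance, and the use of raw float arrays are assumptions, not part of this commit):

// Hypothetical sketch only: a tolerance-based float comparison helper,
// not the NDArray == operator this commit actually relies on.
using System;
using Microsoft.VisualStudio.TestTools.UnitTesting;

public static class FloatAssert
{
    // Assumed default tolerance of 1e-5f; adjust to the precision a test needs.
    public static void AllClose(float[] expected, float[] actual, float tolerance = 1e-5f)
    {
        Assert.AreEqual(expected.Length, actual.Length, "Array lengths differ.");
        for (int i = 0; i < expected.Length; i++)
        {
            // Accept small absolute differences instead of requiring exact bit equality.
            Assert.IsTrue(Math.Abs(expected[i] - actual[i]) <= tolerance,
                $"Element {i}: expected {expected[i]}, got {actual[i]} (tolerance {tolerance}).");
        }
    }
}

With such a helper, a test could call FloatAssert.AllClose on raw float arrays; the commit instead keeps the comparison on the NDArray type itself, which avoids converting tensors to arrays in every test.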

test/TensorFlowNET.Keras.UnitTest/Layers/ActivationTest.cs (+1, -1)

@@ -49,7 +49,7 @@ namespace Tensorflow.Keras.UnitTest.Layers
Tensor input = tf.constant(new float[] { -3f, -2f, -1f, 0f, 1f, 2f });
Tensor output = keras.layers.Softplus().Apply(input);
NDArray expected = new NDArray(new float[] { 0.04858733f, 0.12692805f, 0.31326166f, 0.6931472f, 1.3132616f, 2.126928f });
- Assert.AreEqual(expected, output.numpy());
+ Assert.IsTrue(expected == output.numpy());
}

[TestMethod]


test/TensorFlowNET.Keras.UnitTest/Layers/AttentionTest.cs (+1, -1)

@@ -94,7 +94,7 @@ namespace Tensorflow.Keras.UnitTest.Layers
{ 7.6400003f, 12.24f, 16.84f },
{ 14.24f, 22.84f, 31.439999f }
} }, dtype: np.float32);
- Assert.AreEqual(expected, actual.numpy());
+ Assert.IsTrue(expected == actual.numpy());
}

[TestMethod]


test/TensorFlowNET.Keras.UnitTest/Losses/LossesTest.cs (+1, -1)

@@ -39,7 +39,7 @@ public class LossesTest : EagerModeTestBase
// Using 'none' reduction type.
bce = tf.keras.losses.BinaryCrossentropy(from_logits: true, reduction: Reduction.NONE);
loss = bce.Call(y_true, y_pred);
- Assert.AreEqual(new float[] { 0.23515666f, 1.4957594f }, loss.numpy());
+ Assert.IsTrue(new NDArray(new float[] { 0.23515666f, 1.4957594f }) == loss.numpy());
}

/// <summary>

