You cannot select more than 25 topics. Topics must start with a Chinese character, a letter, or a number; can include dashes ('-'); and can be up to 35 characters long.

GradientEagerTest.cs 3.3 kB

5 years ago
5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105
  1. using Microsoft.VisualStudio.TestTools.UnitTesting;
  2. using System;
  3. using System.Linq;
  4. using static Tensorflow.Binding;
  5. namespace Tensorflow.Native.UnitTest.Eager
  6. {
  7. [TestClass]
  8. public class GradientEagerTest
  9. {
  10. [TestMethod]
  11. public void ConstantSquare()
  12. {
  13. // Calcute the gradient of w * w
  14. // by Automatic Differentiation in Eager mode
  15. // in tensorflow.net 2.x that is in development intensively
  16. var w = tf.constant(1.5f);
  17. using var tape = tf.GradientTape();
  18. tape.watch(w);
  19. var loss = w * w;
  20. var grad = tape.gradient(loss, w);
  21. Assert.AreEqual((float)grad, 3.0f);
  22. }
  23. /// <summary>
  24. /// Calcute the gradient of w * w * w
  25. /// 高阶梯度
  26. /// </summary>
  27. [TestMethod]
  28. public void HighGradient()
  29. {
  30. var x = tf.Variable(1.0f);
  31. using var tape1 = tf.GradientTape();
  32. using var tape2 = tf.GradientTape();
  33. var y = x * x * x;
  34. tape2.Dispose();
  35. var dy_dx = tape2.gradient(y, x);
  36. Assert.AreEqual((float)dy_dx, 3.0f);
  37. tape1.Dispose();
  38. var d2y_d2x = tape1.gradient(dy_dx, x);
  39. Assert.AreEqual((float)d2y_d2x, 6.0f);
  40. }
  41. [TestMethod]
  42. public void ConstantMultiply()
  43. {
  44. var x = tf.ones((2, 2));
  45. using var tape = tf.GradientTape();
  46. tape.watch(x);
  47. var y = tf.reduce_sum(x);
  48. var z = tf.multiply(y, y);
  49. var dz_dx = tape.gradient(z, x);
  50. var expected = new float[] { 8.0f, 8.0f, 8.0f, 8.0f };
  51. Assert.IsTrue(Enumerable.SequenceEqual(dz_dx.ToArray<float>(), expected));
  52. }
  53. [TestMethod]
  54. public void PersistentTape()
  55. {
  56. var x = tf.ones((2, 2));
  57. using var tape = tf.GradientTape(persistent: true);
  58. tape.watch(x);
  59. var y = tf.reduce_sum(x);
  60. var z = tf.multiply(y, y);
  61. tape.Dispose();
  62. var dz_dx = tape.gradient(z, x);
  63. var expected = new float[] { 8.0f, 8.0f, 8.0f, 8.0f };
  64. Assert.IsTrue(Enumerable.SequenceEqual(dz_dx.ToArray<float>(), expected));
  65. var dz_dy = tape.gradient(z, y);
  66. Assert.AreEqual((float)dz_dy, 8.0f);
  67. }
  68. [TestMethod]
  69. public void ConditionalMultiply()
  70. {
  71. Func<Tensor, int, Tensor> func = (x, y) =>
  72. {
  73. Tensor output = tf.constant(1.0f);
  74. foreach (var i in range(y))
  75. {
  76. if (i > 1)
  77. output = tf.multiply(output, x);
  78. }
  79. return output;
  80. };
  81. Func<Tensor, int, Tensor> grad = (x, y) =>
  82. {
  83. using var tape = tf.GradientTape();
  84. tape.watch(x);
  85. var output = func(x, y);
  86. var grad = tape.gradient(output, x);
  87. return grad;
  88. };
  89. var x = tf.constant(2.0f);
  90. var result = grad(x, 4);
  91. Assert.AreEqual((float)result, 4.0f);
  92. }
  93. }
  94. }