
GradientTest.cs

using Microsoft.VisualStudio.TestTools.UnitTesting;
using Tensorflow.NumPy;
using System;
using System.Collections.Generic;
using System.Linq;
using Tensorflow;
using static Tensorflow.Binding;

namespace TensorFlowNET.UnitTest.Gradient
{
    [TestClass]
    public class GradientTest : GraphModeTestBase
    {
        [TestMethod]
        public void BroadcastToGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var grad = tf.gradients(y, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // Broadcasting the scalar to shape (2, 4, 3) fans it out to
                // 2 * 4 * 3 = 24 elements, so the summed gradient is 24.
                Assert.AreEqual(24.0f, result);
            }
        }

        [TestMethod]
        public void CumsumGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var z = tf.cumsum(y, axis: 1);
            var grad = tf.gradients(z, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // Along axis 1 the j-th of the 4 entries is counted (4 - j) times by the
                // cumulative sum: 4 + 3 + 2 + 1 = 10 per (row, column) pair, and there
                // are 2 * 3 such pairs, giving a total gradient of 60.
                Assert.AreEqual(60.0f, result);
            }
        }

        [TestMethod, Ignore]
        public void testGradients()
        {
            var inp = tf.constant(1.0, shape: new[] { 32, 100 }, name: "in");
            var w = tf.constant(1.0, shape: new[] { 100, 10 }, name: "w");
            var b = tf.Variable(1.0, shape: new[] { 10 }, name: "b");
            var xw = math_ops.matmul(inp, w, name: "xw");
            var h = nn_ops.bias_add(xw, b, name: "h");
            var w_grad = gradients_impl.gradients(new[] { h }, new[] { w })[0];
            // The gradient of MatMul w.r.t. its second operand is itself a MatMul
            // with the first operand transposed.
            self.assertEquals("MatMul", w_grad.op.type);
            // TODO: Operation._original_op
            //self.assertEquals(w_grad.op._original_op, xw.op);
            self.assertTrue((bool)w_grad.op.get_attr("transpose_a"));
            self.assertFalse((bool)w_grad.op.get_attr("transpose_b"));
        }

        [TestMethod]
        public void testBatchMatMulGradient()
        {
            var a = tf.constant(np.array(Enumerable.Range(1, 18).Select(elem => (float)elem).ToArray()), shape: new[] { 2, 3, 3 });
            var b = tf.divide(a, tf.constant(2.0f));
            var c = tf.batch_matmul(a, b);
            var g = tf.gradients(c, new[] { a, b }, stop_gradients: new[] { a, b });

            // Expected gradients: the first 18 values are w.r.t. a, the last 18 w.r.t. b.
            var checkG = new[]
            {
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                12.0f, 12.0f, 12.0f,
                15.0f, 15.0f, 15.0f,
                18.0f, 18.0f, 18.0f,
                39.0f, 39.0f, 39.0f,
                42.0f, 42.0f, 42.0f,
                45.0f, 45.0f, 45.0f
            };

            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var resultList = result[0].ToArray<float>().ToList();
                resultList.AddRange(result[1].ToArray<float>());
                CollectionAssert.AreEqual(checkG, resultList.ToArray());
            }
        }

        [TestMethod]
        public void testSimpleGradients()
        {
            (T, T) evaluateDerivatives<T>(Func<Tensor, Tensor> f, T xval) where T : unmanaged
            {
                var x = tf.constant(xval);
                var y = f(x);
                var g = tf.gradients(y, x);
                using (var session = tf.Session())
                {
                    var result = session.run(new[] { y, g[0] });
                    return (result[0].ToArray<T>()[0], result[1].ToArray<T>()[0]);
                }
            }

            void test(string name, Func<Tensor, Tensor> tfF, Func<double, (double, double)> targetF, double[] values)
            {
                foreach (var x in values)
                {
                    var (expectedY, expectedDY) = targetF(x);
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, x);
                        self.assertFloat64Equal(expectedY, actualY, $"value {name}/float64 at {x}");
                        self.assertFloat64Equal(expectedDY, actualDY, $"derivative {name}/float64 at {x}");
                    }
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, (float)x);
                        self.assertFloat32Equal((float)expectedY, actualY, $"value {name}/float32 at {x}");
                        self.assertFloat32Equal((float)expectedDY, actualDY, $"derivative {name}/float32 at {x}");
                    }
                }
            }

            test("tf.exp",
                x => tf.exp(5 * x),
                x => (Math.Exp(5.0 * x), 5.0 * Math.Exp(5.0 * x)),
                new[] { -1.0, 0.0, 1.0, 1.5 });
            test("tf.log",
                x => tf.log(x),
                x => (Math.Log(x), 1.0 / x),
                new[] { 0.5, 1.0, 1.5, 2.0 });
            test("tf.sqrt",
                x => tf.sqrt(x),
                x => (Math.Sqrt(x), 0.5 / Math.Sqrt(x)),
                new[] { 0.5, 1.0, 1.1, 1.5, 2.0 });
            test("tf.sin",
                x => tf.sin(x),
                x => (Math.Sin(x), Math.Cos(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.sinh",
                x => tf.sinh(x),
                x => (Math.Sinh(x), Math.Cosh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cos",
                x => tf.cos(x),
                x => (Math.Cos(x), -Math.Sin(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cosh",
                x => tf.cosh(x),
                x => (Math.Cosh(x), Math.Sinh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.tanh",
                x => tf.tanh(x),
                x => (Math.Tanh(x), 1.0 - Math.Pow(Math.Tanh(x), 2.0)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.maximum",
                x => tf.maximum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Max(x, 0.0), (x > 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
            test("tf.minimum",
                x => tf.minimum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Min(x, 0.0), (x < 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
        }

        [TestMethod]
        public void testReduceSumGradients()
        {
            /* python code
            import tensorflow.compat.v1 as tf
            tf.disable_v2_behavior()
            x = tf.placeholder(tf.float64, shape = (1, 1))
            m = tf.broadcast_to(x, (2, 3))
            g0 = tf.gradients(tf.reduce_sum(m), x)[0]
            g1 = tf.gradients(tf.reduce_sum(m, axis = 0), x)[0]
            g2 = tf.gradients(tf.reduce_sum(m, axis = 1), x)[0]
            with tf.compat.v1.Session() as sess:
                (r0, r1, r2) = sess.run((g0, g1, g2), {x: [[1.0]]})
            */
            var x = tf.placeholder(tf.float64, shape: new Shape(1, 1));
            var m = tf.broadcast_to(x, new Shape(2, 3));
            var g0 = tf.gradients(tf.reduce_sum(m), x)[0];
            var g1 = tf.gradients(tf.reduce_sum(m, axis: 0), x)[0];
            var g2 = tf.gradients(tf.reduce_sum(m, axis: 1), x)[0];
            using (var session = tf.Session())
            {
                var (r0, r1, r2) = session.run((g0, g1, g2), new FeedItem(x, new[,] { { 1.0 } }));
                self.assertFloat64Equal(6.0, r0[0], "tf.reduce_sum(...)");
                self.assertFloat64Equal(6.0, r1[0], "tf.reduce_sum(..., axis = 0)");
                self.assertFloat64Equal(6.0, r2[0], "tf.reduce_sum(..., axis = 1)");
            }
        }

        [TestMethod]
        public void testTanhGradient()
        {
            var a = tf.constant(1f);
            var b = tf.tanh(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0];
                // d/dx tanh(x) = 1 - tanh(x)^2; at x = 1 that is approximately 0.41997434.
                Assert.AreEqual(actual, 0.41997434127f);
            }
        }

        [TestMethod]
        public void testLgammaGrad()
        {
            var a = tf.constant(5f);
            var b = tf.lgamma(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = result[0];
                var actual = result[1];
                // lgamma(5) = ln(4!) = ln(24) ≈ 3.1780539; its derivative is the
                // digamma function, psi(5) ≈ 1.5061177.
                Assert.AreEqual(actualDeriv, 1.5061177f);
                Assert.AreEqual(actual, 3.17805386f);
            }
        }

        [TestMethod]
        public void testSliceGrad()
        {
            var a = tf.tanh(tf.constant(new[] { 2f, 3f }, shape: new[] { 2, 1 }));
            var b = tf.strided_slice(a,
                tf.constant(new[] { 0 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 })
            );
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = np.squeeze(result[0]);
                var actual = np.squeeze(result[1]);
                // Only the sliced-out first element receives gradient; the second gets 0.
                Assert.AreEqual(actualDeriv, new float[] { 1, 0 });
                Assert.AreEqual(actual, 0.9640276f);
            }
        }

        [TestMethod]
        public void testConcatGrad()
        {
            var a1 = tf.constant(new[] { 2f }, shape: new[] { 1 });
            var a2 = tf.constant(new[] { 3f }, shape: new[] { 1 });
            var a = tf.concat(new List<Tensor>(new[] { a1, a2 }), 0);
            var g = tf.gradients(a, a1);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, a });
                var actualDeriv = result[0][0];
                var actual = result[1][0];
                Assert.AreEqual(actualDeriv, 1f);
                Assert.AreEqual(actual, 2f);
            }
        }

        [TestMethod]
        public void testStopGradientFunction()
        {
            var ap = tf.constant(1f);
            var b = tf.tanh(ap) + gen_array_ops.stop_gradient(ap);
            var g = tf.gradients(b, ap);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0];
                // The stop_gradient branch contributes nothing, so the gradient is just
                // d/dx tanh(x) at 1, approximately 0.41997434.
                Assert.AreEqual(actual, 0.41997434127f);
            }
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnusedOutput()
        {
            //def testUnusedOutput(self):
            //  with ops.Graph().as_default():
            //    w = constant(1.0, shape=[2, 2])
            //    x = constant(1.0, shape=[2, 2])
            //    wx = math_ops.matmul(w, x)
            //    split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
            //    c = math_ops.reduce_sum(split_wx[1])
            //    gw = gradients.gradients(c, [w])[0]
            //    self.assertEquals("MatMul", gw.op.type)
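
            // A minimal C# sketch of this port. It assumes tf.split exposes an
            // overload mirroring Python's array_ops.split(value, num_or_size_splits,
            // axis); that signature is an assumption, so the test stays ignored
            // until it is verified against the TensorFlow.NET API.
            var w = tf.constant(1.0f, shape: new[] { 2, 2 });
            var x = tf.constant(1.0f, shape: new[] { 2, 2 });
            var wx = math_ops.matmul(w, x);
            var split_wx = tf.split(wx, 2, 0);   // split rows into two [1, 2] halves
            var c = tf.reduce_sum(split_wx[1]);  // only the second output is used
            var gw = tf.gradients(c, new[] { w })[0];
            // Even with an unused split output, the gradient op should be a MatMul.
            self.assertEquals("MatMul", gw.op.type);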
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradients()
        {
            //def testColocateGradients(self):
            //  with ops.Graph().as_default() as g:
            //    w = constant(1.0, shape=[1, 1])
            //    x = constant(1.0, shape=[1, 2])
            //    with g.device("/device:GPU:0"):
            //      wx = math_ops.matmul(w, x)
            //    gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregation()
        {
            //def testColocateGradientsWithAggregation(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    wx = math_ops.matmul(w, x)
            //    wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregationInMultipleDevices()
        {
            //def testColocateGradientsWithAggregationInMultipleDevices(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    with g.device("/task:1"):
            //      wx = math_ops.matmul(w, x)
            //    with g.device("/task:2"):
            //      wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithGateGradients()
        {
            //def testColocateGradientsWithGateGradients(self):
            //  if not test_util.is_gpu_available():
            //    self.skipTest("No GPU available")
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:CPU:0"):
            //      x = constant(1.0, shape=[1, 1])
            //      y = constant(1.0, shape=[1, 1])
            //      s = x + y
            //    with g.device("/device:GPU:0"):
            //      z = math_ops.reduce_sum(s)
            //    gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
            //                               gate_gradients=True)[0]
            //    with session.Session():
            //      # Make sure the placer doesn't complain.
            //      self.evaluate(gz_x)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryStop()
        {
            //def testBoundaryStop(self):
            //  # Test that we don't differentiate 'x'. The gradient function for 'x' is
            //  # set explicitly to None so we will get an exception if the gradient code
            //  # tries to differentiate 'x'.
            //  with ops.Graph().as_default():
            //    c = constant(1.0)
            //    x = array_ops.identity(c)
            //    y = x + 1.0
            //    z = y + 1
            //    grads = gradients.gradients(z, [x])
            //    self.assertTrue(all(x is not None for x in grads))
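
            // A minimal C# sketch of the port, assuming tf.identity and the
            // Tensor + float operator overload are available in this build.
            var c = tf.constant(1.0f);
            var x = tf.identity(c);
            var y = x + 1.0f;
            var z = y + 1.0f;
            var grads = tf.gradients(z, new[] { x });
            // Differentiation must stop at x and still yield a gradient for it.
            Assert.IsTrue(grads.All(t => t != null));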
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryContinue()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testBoundaryContinue(self):
            //  # Test that we differentiate both 'x' and 'y' correctly when x is a
            //  # predecessor of y.
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y * 3.0
            //    grads = gradients.gradients(z, [x, y])
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(6.0, grads[0].eval())
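
            // A minimal C# sketch of the port, assuming the Tensor * float operator
            // overload: dz/dx = 2 * 3 = 6 and dz/dy = 3.
            var x = tf.constant(1.0f);
            var y = x * 2.0f;
            var z = y * 3.0f;
            var grads = tf.gradients(z, new[] { x, y });
            Assert.IsTrue(grads.All(t => t != null));
            using (var sess = tf.Session())
            {
                float dzdx = sess.run(grads[0]);
                Assert.AreEqual(6.0f, dzdx);
            }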
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAccumulateN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAccumulateN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_ACCUMULATE_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAddN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAddN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodTree()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodTree(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNoGradientForStringOutputs()
        {
            //def testNoGradientForStringOutputs(self):
            //  with ops.Graph().as_default():
            //    def _TestOpGrad(_, float_grad, string_grad):
            //      """Gradient function for TestStringOutput."""
            //      self.assertEquals(float_grad.dtype, dtypes.float32)
            //      self.assertFalse(string_grad)
            //      return float_grad
            //    ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
            //    c = constant(1.0)
            //    x, _ = test_ops.test_string_output(c)
            //    z = x * 2.0
            //    w = z * 3.0
            //    grads = gradients.gradients(z, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
            //    grads = gradients.gradients(w, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testSingletonIndexedSlices()
        {
            //def testSingletonIndexedSlices(self):
            //  with ops.Graph().as_default():
            //    x = array_ops.placeholder(dtypes.float32)
            //    y = array_ops.identity(x)
            //    dy = ops.IndexedSlices(
            //        array_ops.placeholder(dtypes.float32),
            //        array_ops.placeholder(dtypes.int32))
            //    dx, = gradients.gradients(y, x, grad_ys=dy)
            //    # The IndexedSlices gradient of tf.identity is the identity map.
            //    with self.cached_session() as sess:
            //      vdx, vdy = sess.run(
            //          [dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
            //      self.assertEqual(vdx, vdy)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNonDifferentiableSwitchInWhileLoop()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testNonDifferentiableSwitchInWhileLoop(self):
            //  with ops.Graph().as_default():
            //    v = array_ops.placeholder(dtypes.float32, [])
            //    def _Step(i, a, ta):
            //      a += math_ops.cast(v, dtypes.int32)
            //      return (i + 1, a, ta.write(i, a))
            //    n = 4
            //    i, _, ta = control_flow_ops.while_loop(
            //        lambda i, *_: i < n,
            //        _Step, [0, 0, tensor_array_ops.TensorArray(
            //            dtypes.int32, size=n)])
            //    target = ta.read(i - 1)
            //    grad, = gradients.gradients(target, v)
            //    self.assertIsNone(grad)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableReadValueGradient()
        {
            //def testVariableReadValueGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(var.read_value(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableAsGraphElementGradient()
        {
            //def testVariableAsGraphElementGradient(self):
            //  with ops.Graph().as_default() as graph:
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(graph.as_graph_element(var), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableRefGradient()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testVariableRefGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.VariableV1(init)
            //    gradient = gradients.gradients(var._ref(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testDependentYs()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testDependentYs(self):
            //  with self.cached_session():
            //    x = constant_op.constant(3.0)
            //    y = math_ops.square(x)
            //    y1 = math_ops.square(y)
            //    y2 = math_ops.square(y1)
            //    g = gradients.gradients([y, y2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    g = gradients.gradients(y + y2, x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    z = array_ops.identity(y)
            //    z2 = array_ops.identity(y2)
            //    g = gradients.gradients([z, z2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
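
            // A minimal C# sketch of the y + y2 case, assuming the Tensor * Tensor
            // operator overload: with y = x^2 and y2 = x^8, d(y + y2)/dx at x = 3 is
            // 2 * 3 + 8 * 3^7 = 17502.
            var x = tf.constant(3.0f);
            var y = x * x;
            var y1 = y * y;
            var y2 = y1 * y1;
            var g = tf.gradients(y + y2, x);
            using (var sess = tf.Session())
            {
                float result = sess.run(g[0]);
                Assert.AreEqual(17502.0f, result);
            }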
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testPartialDerivatives()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testPartialDerivatives(self):
            //  with self.cached_session():
            //    x = constant_op.constant(1.)
            //    y = 2 * x
            //    z = x + y
            //    totalg = gradients.gradients(z, [x, y])
            //    self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
            //    partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
            //    self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
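
            // A minimal C# sketch of the port, reusing the stop_gradients argument
            // already exercised by testBatchMatMulGradient above. The total derivative
            // dz/dx = 3; treating x and y as independent leaves gives 1 for each.
            var x = tf.constant(1.0f);
            var y = 2 * x;
            var z = x + y;
            var totalg = tf.gradients(z, new[] { x, y });
            var partialg = tf.gradients(z, new[] { x, y }, stop_gradients: new[] { x, y });
            using (var sess = tf.Session())
            {
                float total0 = sess.run(totalg[0]);
                float partial0 = sess.run(partialg[0]);
                Assert.AreEqual(3.0f, total0);
                Assert.AreEqual(1.0f, partial0);
            }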
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testStopGradients()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testStopGradients(self):
            //  def _MakeGraph(rng, stop_gradients=()):
            //    def _FunctionOf(xs, k=3):
            //      return ops.convert_to_tensor(
            //          sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
            //          + rng.rand(k, k))
            //    a = _FunctionOf([])
            //    if "a" in stop_gradients: a = array_ops.stop_gradient(a)
            //    b = _FunctionOf([a])
            //    if "b" in stop_gradients: b = array_ops.stop_gradient(b)
            //    c = _FunctionOf([a, b])
            //    if "c" in stop_gradients: c = array_ops.stop_gradient(c)
            //    d = _FunctionOf([b, c])
            //    if "d" in stop_gradients: d = array_ops.stop_gradient(d)
            //    return dict(a=a, b=b, c=c, d=d)
            //
            //  def _Gradients(ys, xs, **kwargs):
            //    dydxs = gradients.gradients(ys, xs, **kwargs)
            //    dydxs = [0. * x if dydx is None else dydx
            //             for x, dydx in zip(xs, dydxs)]
            //    return dydxs
            //
            //  seed = np.random.randint(1000)
            //  cases = []
            //  subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
            //  graph = _MakeGraph(np.random.RandomState(seed))
            //  for constants in subsets:
            //    graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
            //    for variables_ in subsets:
            //      # compute the gradient when stopped using tf.stop_gradients
            //      grad1 = _Gradients([graph_with_stops["d"]],
            //                         [graph_with_stops[v] for v in variables_])
            //      # compute the gradient when stopped using the stop_gradients kwarg
            //      grad2 = _Gradients([graph["d"]],
            //                         [graph[v] for v in variables_],
            //                         stop_gradients=[graph[v] for v in constants])
            //      cases.append(dict(grad1=grad1, grad2=grad2,
            //                        constants=constants, variables=variables_))
            //
            //  # evaluate all tensors in one call to session.run for speed
            //  with self.cached_session() as sess:
            //    results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
            //    for (npgrad1, npgrad2), case in zip(results, cases):
            //      for a, b in zip(npgrad1, npgrad2):
            //        np.testing.assert_allclose(a, b)
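
            // A minimal C# sketch of the core claim on a tiny chain: stopping a tensor
            // with a graph-level stop_gradient op should match stopping it via the
            // stop_gradients argument. Whether TensorFlow.NET's stop_gradients accepts
            // an intermediate tensor like b is an assumption here. Both paths leave
            // only the direct `+ a` contribution, so each gradient is 1 (it would be
            // 4 without the stop).
            var a = tf.constant(2.0f);
            var d1 = gen_array_ops.stop_gradient(a * 3.0f) + a;
            var g1 = tf.gradients(d1, a);
            var b = a * 3.0f;
            var d2 = b + a;
            var g2 = tf.gradients(d2, new[] { a }, stop_gradients: new[] { b });
            using (var sess = tf.Session())
            {
                float r1 = sess.run(g1[0]);
                float r2 = sess.run(g2[0]);
                Assert.AreEqual(1.0f, r1);
                Assert.AreEqual(r1, r2);
            }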
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsNoneUnconnectedGradients()
        {
            //def testUnconnectedGradientsNoneUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="none")
            //    self.assertIsNone(grad[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZerosUnconnectedGradients()
        {
            //def testUnconnectedGradientsZerosUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grads = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZeroConnectedGradients()
        {
            //def testUnconnectedGradientsZeroConnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = x * 3.0
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertEquals(3.0, self.evaluate(grad)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnknownUnconnectedGradientsValueGiven()
        {
            //def testUnknownUnconnectedGradientsValueGiven(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = constant(1.0)
            //    with self.assertRaisesRegexp(
            //        ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
            //      gradients.gradients([y], [x], unconnected_gradients="nonsense")
        }
    }
}