GradientTest.cs

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NumSharp;
using Tensorflow;
using static Tensorflow.Binding;

namespace TensorFlowNET.UnitTest.Gradient
{
    [Ignore]
    [TestClass]
    public class GradientTest : PythonTest
    {
        [TestMethod]
        public void BroadcastToGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var grad = tf.gradients(y, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // x is broadcast to 2 * 4 * 3 = 24 elements, each with gradient 1.
                Assert.AreEqual(24.0f, result);
            }
        }

        [TestMethod]
        public void CumsumGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var z = tf.cumsum(y, axis: 1);
            var grad = tf.gradients(z, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // cumsum along axis 1 reuses each element 1..4 times: 2 * 3 * (1+2+3+4) = 60.
                Assert.AreEqual(60.0f, result);
            }
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testGradients()
        {
            var g = tf.Graph().as_default();
            var inp = tf.constant(1.0, shape: new[] { 32, 100 }, name: "in");
            var w = tf.constant(1.0, shape: new[] { 100, 10 }, name: "w");
            var b = tf.constant(1.0, shape: new[] { 10 }, name: "b");
            var xw = math_ops.matmul(inp, w, name: "xw");
            var h = nn_ops.bias_add(xw, b, name: "h");
            var w_grad = gradients_impl.gradients(new[] { h }, new[] { w })[0];
            self.assertEquals("MatMul", w_grad.op.type);
            // TODO: Operation._original_op
            //self.assertEquals(w_grad.op._original_op, xw.op);
            self.assertTrue((bool)w_grad.op.get_attr("transpose_a"));
            self.assertFalse((bool)w_grad.op.get_attr("transpose_b"));
        }
        [TestMethod]
        public void testBatchMatMulGradient()
        {
            var a = tf.constant(np.array(Enumerable.Range(1, 18).Select(elem => (float)elem).ToArray()), shape: new[] { 2, 3, 3 });
            var b = tf.divide(a, tf.constant(2.0f));
            var c = tf.batch_matmul(a, b);
            var g = tf.gradients(c, new[] { a, b }, stop_gradients: new[] { a, b });
            var checkG = new[]
            {
                // gradient with respect to a
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                // gradient with respect to b
                12.0f, 12.0f, 12.0f,
                15.0f, 15.0f, 15.0f,
                18.0f, 18.0f, 18.0f,
                39.0f, 39.0f, 39.0f,
                42.0f, 42.0f, 42.0f,
                45.0f, 45.0f, 45.0f
            };
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var resultList = result[0].GetData<float>().ToList();
                resultList.AddRange(result[1].GetData<float>());
                // MSTest's CollectionAssert takes (expected, actual).
                CollectionAssert.AreEqual(checkG, resultList.ToArray());
            }
        }
        [TestMethod]
        public void testSimpleGradients()
        {
            (T, T) evaluateDerivatives<T>(Func<Tensor, Tensor> f, T xval) where T : unmanaged
            {
                var x = tf.constant(xval);
                var y = f(x);
                var g = tf.gradients(y, x);

                using (var session = tf.Session())
                {
                    var result = session.run(new[] { y, g[0] });
                    return (result[0].GetData<T>()[0], result[1].GetData<T>()[0]);
                }
            }

            void test(string name, Func<Tensor, Tensor> tfF, Func<double, (double, double)> targetF, double[] values)
            {
                foreach (var x in values)
                {
                    var (expectedY, expectedDY) = targetF(x);
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, x);
                        self.assertFloat64Equal(expectedY, actualY, $"value {name}/float64 at {x}");
                        self.assertFloat64Equal(expectedDY, actualDY, $"derivative {name}/float64 at {x}");
                    }
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, (float)x);
                        self.assertFloat32Equal((float)expectedY, actualY, $"value {name}/float32 at {x}");
                        self.assertFloat32Equal((float)expectedDY, actualDY, $"derivative {name}/float32 at {x}");
                    }
                }
            }

            test("tf.exp",
                x => tf.exp(5 * x),
                x => (Math.Exp(5.0 * x), 5.0 * Math.Exp(5.0 * x)),
                new[] { -1.0, 0.0, 1.0, 1.5 });
            test("tf.log",
                x => tf.log(x),
                x => (Math.Log(x), 1.0 / x),
                new[] { 0.5, 1.0, 1.5, 2.0 });
            test("tf.sqrt",
                x => tf.sqrt(x),
                x => (Math.Sqrt(x), 0.5 / Math.Sqrt(x)),
                new[] { 0.5, 1.0, 1.1, 1.5, 2.0 });
            test("tf.sin",
                x => tf.sin(x),
                x => (Math.Sin(x), Math.Cos(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.sinh",
                x => tf.sinh(x),
                x => (Math.Sinh(x), Math.Cosh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cos",
                x => tf.cos(x),
                x => (Math.Cos(x), -Math.Sin(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cosh",
                x => tf.cosh(x),
                x => (Math.Cosh(x), Math.Sinh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.tanh",
                x => tf.tanh(x),
                x => (Math.Tanh(x), 1.0 - Math.Pow(Math.Tanh(x), 2.0)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.maximum",
                x => tf.maximum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Max(x, 0.0), (x > 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
            test("tf.minimum",
                x => tf.minimum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Min(x, 0.0), (x < 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
        }

        [TestMethod]
        public void testTanhGradient()
        {
            var a = tf.constant(1f);
            var b = tf.tanh(a);
            var g = tf.gradients(b, a);

            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                self.assertEquals(0.41997434127f, actual);
            }
        }

        [TestMethod]
        public void testLgammaGrad()
        {
            var a = tf.constant(5f);
            var b = tf.lgamma(a);
            var g = tf.gradients(b, a);

            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                self.assertEquals(1.5061177f, actualDeriv);
                self.assertEquals(3.17805386f, actual);
            }
        }

        [TestMethod]
        public void testSliceGrad()
        {
            var a = tf.tanh(tf.constant(new[] { 2f, 3f }, shape: new[] { 2, 1 }));
            var b = tf.strided_slice(a,
                tf.constant(new[] { 0 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 })
            );
            var g = tf.gradients(b, a);

            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = np.squeeze(result[0]);
                var actual = np.squeeze(result[1]);
                self.assertEquals(new float[] { 1, 0 }, new float[] { actualDeriv[0], actualDeriv[1] });
                self.assertEquals(0.9640276f, (float)actual);
            }
        }

        [TestMethod]
        public void testConcatGrad()
        {
            var a1 = tf.constant(new[] { 2f }, shape: new[] { 1 });
            var a2 = tf.constant(new[] { 3f }, shape: new[] { 1 });
            var a = tf.concat(new List<Tensor>(new[] { a1, a2 }), 0);
            var g = tf.gradients(a, a1);

            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, a });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                self.assertEquals(1f, actualDeriv);
                self.assertEquals(2f, actual);
            }
        }

        [TestMethod]
        public void testStopGradientFunction()
        {
            var ap = tf.constant(1f);
            var b = tf.tanh(ap) + gen_array_ops.stop_gradient(ap);
            var g = tf.gradients(b, ap);

            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                self.assertEquals(0.41997434127f, actual);
            }
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testUnusedOutput()
        {
            //def testUnusedOutput(self):
            //  with ops.Graph().as_default():
            //    w = constant(1.0, shape=[2, 2])
            //    x = constant(1.0, shape=[2, 2])
            //    wx = math_ops.matmul(w, x)
            //    split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
            //    c = math_ops.reduce_sum(split_wx[1])
            //    gw = gradients.gradients(c, [w])[0]
            //    self.assertEquals("MatMul", gw.op.type)
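
            // A hedged C# sketch of the Python test above, not a verified port:
            // it assumes TF.NET exposes tf.split(value, num_split, axis) and
            // tf.reduce_sum with these shapes of overloads; the other calls
            // already appear elsewhere in this file.
            var graph = tf.Graph().as_default();
            var w = tf.constant(1.0f, shape: new[] { 2, 2 });
            var x = tf.constant(1.0f, shape: new[] { 2, 2 });
            var wx = math_ops.matmul(w, x);
            var split_wx = tf.split(wx, 2, axis: 0);   // assumed overload
            var c = tf.reduce_sum(split_wx[1]);        // assumed overload
            // Only the second split output feeds c; the gradient code must still
            // handle the unused first output (as zeros) inside the MatMul grad.
            var gw = tf.gradients(c, new[] { w })[0];
            self.assertEquals("MatMul", gw.op.type);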
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradients()
        {
            //def testColocateGradients(self):
            //  with ops.Graph().as_default() as g:
            //    w = constant(1.0, shape=[1, 1])
            //    x = constant(1.0, shape=[1, 2])
            //    with g.device("/device:GPU:0"):
            //      wx = math_ops.matmul(w, x)
            //    gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregation()
        {
            //def testColocateGradientsWithAggregation(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //      wx = math_ops.matmul(w, x)
            //      wy = math_ops.matmul(w, y)
            //      with g.device("/device:GPU:0"):
            //        z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregationInMultipleDevices()
        {
            //def testColocateGradientsWithAggregationInMultipleDevices(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //      with g.device("/task:1"):
            //        wx = math_ops.matmul(w, x)
            //      with g.device("/task:2"):
            //        wy = math_ops.matmul(w, y)
            //      with g.device("/device:GPU:0"):
            //        z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithGateGradients()
        {
            //def testColocateGradientsWithGateGradients(self):
            //  if not test_util.is_gpu_available():
            //    self.skipTest("No GPU available")
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:CPU:0"):
            //      x = constant(1.0, shape=[1, 1])
            //      y = constant(1.0, shape=[1, 1])
            //      s = x + y
            //    with g.device("/device:GPU:0"):
            //      z = math_ops.reduce_sum(s)
            //    gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
            //                               gate_gradients=True)[0]
            //    with session.Session():
            //      # Make sure the placer doesn't complain.
            //      self.evaluate(gz_x)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryStop()
        {
            //def testBoundaryStop(self):
            //  # Test that we don't differentiate 'x'. The gradient function for 'x' is
            //  # set explicitly to None so we will get an exception if the gradient code
            //  # tries to differentiate 'x'.
            //  with ops.Graph().as_default():
            //    c = constant(1.0)
            //    x = array_ops.identity(c)
            //    y = x + 1.0
            //    z = y + 1
            //    grads = gradients.gradients(z, [x])
            //    self.assertTrue(all(x is not None for x in grads))
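
            // A hedged C# sketch of the boundary-stop case: differentiation
            // starts at x, so c behind it must not be touched. tf.identity and
            // the Tensor + float operator overload are assumed to behave as in
            // the Python API.
            var graph = tf.Graph().as_default();
            var c = tf.constant(1.0f);
            var x = tf.identity(c);
            var y = x + 1.0f;
            var z = y + 1.0f;
            var grads = tf.gradients(z, new[] { x });
            self.assertTrue(!(grads[0] is null));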
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryContinue()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testBoundaryContinue(self):
            //  # Test that we differentiate both 'x' and 'y' correctly when x is a
            //  # predecessor of y.
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y * 3.0
            //    grads = gradients.gradients(z, [x, y])
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(6.0, grads[0].eval())
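
            // A hedged C# sketch of the same chain-rule check, using only calls
            // already exercised elsewhere in this file:
            var graph = tf.Graph().as_default();
            var x = tf.constant(1.0f);
            var y = x * tf.constant(2.0f);
            var z = y * tf.constant(3.0f);
            var grads = tf.gradients(z, new[] { x, y });
            using (var sess = tf.Session(graph))
            {
                // dz/dx = 2 * 3 = 6 (dz/dy would be 3).
                float dzdx = sess.run(grads[0]);
                self.assertEquals(6.0f, dzdx);
            }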
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAccumulateN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAccumulateN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.
            //        EXPERIMENTAL_ACCUMULATE_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAddN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAddN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
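
            // A hedged C# sketch: TF.NET may not expose an AggregationMethod
            // enum, so this relies on the default gradient aggregation (AddN in
            // TF v1) instead of passing the Python kwarg.
            var graph = tf.Graph().as_default();
            var x = tf.constant(1.0f);
            var y = x * tf.constant(2.0f);
            var z = y + y + y + y + y + y + y + y + y + y;
            var grads = tf.gradients(z, new[] { x, y });
            using (var sess = tf.Session(graph))
            {
                // Ten contributions of dy/dx = 2 aggregate to 20; dz/dy = 10.
                float dzdx = sess.run(grads[0]);
                float dzdy = sess.run(grads[1]);
                self.assertEquals(20.0f, dzdx);
                self.assertEquals(10.0f, dzdy);
            }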
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodTree()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodTree(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNoGradientForStringOutputs()
        {
            //def testNoGradientForStringOutputs(self):
            //  with ops.Graph().as_default():
            //    def _TestOpGrad(_, float_grad, string_grad):
            //      """Gradient function for TestStringOutput."""
            //      self.assertEquals(float_grad.dtype, dtypes.float32)
            //      self.assertFalse(string_grad)
            //      return float_grad
            //    ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
            //    c = constant(1.0)
            //    x, _ = test_ops.test_string_output(c)
            //    z = x * 2.0
            //    w = z * 3.0
            //    grads = gradients.gradients(z, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
            //    grads = gradients.gradients(w, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testSingletonIndexedSlices()
        {
            //def testSingletonIndexedSlices(self):
            //  with ops.Graph().as_default():
            //    x = array_ops.placeholder(dtypes.float32)
            //    y = array_ops.identity(x)
            //    dy = ops.IndexedSlices(
            //        array_ops.placeholder(dtypes.float32),
            //        array_ops.placeholder(dtypes.int32))
            //    dx, = gradients.gradients(y, x, grad_ys=dy)
            //    # The IndexedSlices gradient of tf.identity is the identity map.
            //    with self.cached_session() as sess:
            //      vdx, vdy = sess.run(
            //          [dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
            //    self.assertEqual(vdx, vdy)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNonDifferentiableSwitchInWhileLoop()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testNonDifferentiableSwitchInWhileLoop(self):
            //  with ops.Graph().as_default():
            //    v = array_ops.placeholder(dtypes.float32, [])
            //    def _Step(i, a, ta):
            //      a += math_ops.cast(v, dtypes.int32)
            //      return (i + 1, a, ta.write(i, a))
            //    n = 4
            //    i, _, ta = control_flow_ops.while_loop(
            //        lambda i, *_: i < n,
            //        _Step, [0, 0, tensor_array_ops.TensorArray(
            //            dtypes.int32, size=n)])
            //    target = ta.read(i - 1)
            //    grad, = gradients.gradients(target, v)
            //    self.assertIsNone(grad)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableReadValueGradient()
        {
            //def testVariableReadValueGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(var.read_value(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableAsGraphElementGradient()
        {
            //def testVariableAsGraphElementGradient(self):
            //  with ops.Graph().as_default() as graph:
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(graph.as_graph_element(var), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableRefGradient()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testVariableRefGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.VariableV1(init)
            //    gradient = gradients.gradients(var._ref(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testDependentYs()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testDependentYs(self):
            //  with self.cached_session():
            //    x = constant_op.constant(3.0)
            //    y = math_ops.square(x)
            //    y1 = math_ops.square(y)
            //    y2 = math_ops.square(y1)
            //    g = gradients.gradients([y, y2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    g = gradients.gradients(y + y2, x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    z = array_ops.identity(y)
            //    z2 = array_ops.identity(y2)
            //    g = gradients.gradients([z, z2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
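
            // A hedged C# sketch of the first assertion: with x = 3 and
            // y2 = x^8, d(y + y2)/dx = 2x + 8x^7 = 6 + 17496 = 17502. It assumes
            // TF.NET exposes tf.square; gradients_impl.gradients is used with an
            // array of ys as in testGradients above.
            var graph = tf.Graph().as_default();
            var x = tf.constant(3.0f);
            var y = tf.square(x);    // assumed API
            var y1 = tf.square(y);
            var y2 = tf.square(y1);
            var g = gradients_impl.gradients(new[] { y, y2 }, new[] { x });
            using (var sess = tf.Session(graph))
            {
                float result = sess.run(g[0]);
                self.assertEquals(17502.0f, result);
            }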
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testPartialDerivatives()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testPartialDerivatives(self):
            //  with self.cached_session():
            //    x = constant_op.constant(1.)
            //    y = 2 * x
            //    z = x + y
            //    totalg = gradients.gradients(z, [x, y])
            //    self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
            //    partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
            //    self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
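
            // A hedged C# sketch: the stop_gradients argument is already used by
            // testBatchMatMulGradient above, so only the Tensor operator
            // overloads are assumptions here.
            var graph = tf.Graph().as_default();
            var x = tf.constant(1.0f);
            var y = tf.constant(2.0f) * x;
            var z = x + y;
            // Total derivative dz/dx = 1 + 2 = 3; with y held constant it is 1.
            var totalg = tf.gradients(z, new[] { x, y });
            var partialg = tf.gradients(z, new[] { x, y }, stop_gradients: new[] { x, y });
            using (var sess = tf.Session(graph))
            {
                self.assertEquals(3.0f, (float)sess.run(totalg[0]));
                self.assertEquals(1.0f, (float)sess.run(totalg[1]));
                self.assertEquals(1.0f, (float)sess.run(partialg[0]));
                self.assertEquals(1.0f, (float)sess.run(partialg[1]));
            }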
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testStopGradients()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testStopGradients(self):
            //  def _MakeGraph(rng, stop_gradients=()):
            //    def _FunctionOf(xs, k=3):
            //      return ops.convert_to_tensor(
            //          sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
            //          + rng.rand(k, k))
            //    a = _FunctionOf([])
            //    if "a" in stop_gradients: a = array_ops.stop_gradient(a)
            //    b = _FunctionOf([a])
            //    if "b" in stop_gradients: b = array_ops.stop_gradient(b)
            //    c = _FunctionOf([a, b])
            //    if "c" in stop_gradients: c = array_ops.stop_gradient(c)
            //    d = _FunctionOf([b, c])
            //    if "d" in stop_gradients: d = array_ops.stop_gradient(d)
            //    return dict(a=a, b=b, c=c, d=d)
            //  def _Gradients(ys, xs, **kwargs):
            //    dydxs = gradients.gradients(ys, xs, **kwargs)
            //    dydxs = [0. * x if dydx is None else dydx
            //             for x, dydx in zip(xs, dydxs)]
            //    return dydxs
            //  seed = np.random.randint(1000)
            //  cases = []
            //  subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
            //  graph = _MakeGraph(np.random.RandomState(seed))
            //  for constants in subsets:
            //    graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
            //    for variables_ in subsets:
            //      # compute the gradient when stopped using tf.stop_gradients
            //      grad1 = _Gradients([graph_with_stops["d"]],
            //                         [graph_with_stops[v] for v in variables_])
            //      # compute the gradient when stopped using the stop_gradients kwarg
            //      grad2 = _Gradients([graph["d"]],
            //                         [graph[v] for v in variables_],
            //                         stop_gradients=[graph[v] for v in constants])
            //      cases.append(dict(grad1=grad1, grad2=grad2,
            //                        constants=constants, variables=variables_))
            //  # evaluate all tensors in one call to session.run for speed
            //  with self.cached_session() as sess:
            //    results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
            //    for (npgrad1, npgrad2), case in zip(results, cases):
            //      for a, b in zip(npgrad1, npgrad2):
            //        np.testing.assert_allclose(a, b)
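
            // Rather than a full port of the randomized graph above, a hedged
            // minimal sketch of the property under test: inserting an explicit
            // stop_gradient op and passing the stop_gradients argument must
            // agree. All calls below already appear elsewhere in this file.
            var graph = tf.Graph().as_default();
            var a = tf.constant(2.0f);
            var b = a * tf.constant(3.0f);
            var viaOp = tf.gradients(gen_array_ops.stop_gradient(b) * a, new[] { a });
            var viaArg = tf.gradients(b * a, new[] { a }, stop_gradients: new[] { b });
            using (var sess = tf.Session(graph))
            {
                // Both treat b as a constant (6), so d(b*a)/da = 6 either way.
                self.assertEquals((float)sess.run(viaOp[0]), (float)sess.run(viaArg[0]));
            }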
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsNoneUnconnectedGradients()
        {
            //def testUnconnectedGradientsNoneUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="none")
            //    self.assertIsNone(grad[0])
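
            // A hedged C# sketch, assuming TF.NET's tf.gradients follows the
            // Python default ("none") and returns null for an unconnected input;
            // no unconnected_gradients parameter is passed.
            var graph = tf.Graph().as_default();
            var x = tf.constant(1.0f, shape: new[] { 2, 2 });
            var y = tf.constant(3.0f, shape: new[] { 3, 1 });
            var grad = tf.gradients(y, new[] { x });
            self.assertTrue(grad[0] is null);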
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZerosUnconnectedGradients()
        {
            //def testUnconnectedGradientsZerosUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grads = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZeroConnectedGradients()
        {
            //def testUnconnectedGradientsZeroConnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = x * 3.0
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertEquals(3.0, self.evaluate(grad)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnknownUnconnectedGradientsValueGiven()
        {
            //def testUnknownUnconnectedGradientsValueGiven(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = constant(1.0)
            //    with self.assertRaisesRegexp(
            //        ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
            //      gradients.gradients([y], [x], unconnected_gradients="nonsense")
        }
    }
}