GradientTest.cs
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NumSharp;
using Tensorflow;
using Tensorflow.UnitTest;
using static Tensorflow.Binding;

namespace TensorFlowNET.UnitTest.Gradient
{
    [TestClass]
    public class GradientTest : GraphModeTestBase
    {
        [TestMethod]
        public void BroadcastToGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var grad = tf.gradients(y, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // The gradient of broadcast_to sums over the broadcast
                // dimensions: 2 * 4 * 3 = 24 contributions of 1 each.
                Assert.AreEqual(24.0f, result);
            }
        }

        [TestMethod]
        public void CumsumGrad()
        {
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var z = tf.cumsum(y, axis: 1);
            var grad = tf.gradients(z, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                // Along axis 1 (length 4) each element feeds 4, 3, 2 and 1
                // cumsum outputs respectively: (4 + 3 + 2 + 1) * 2 * 3 = 60.
                Assert.AreEqual(60.0f, result);
            }
        }

        [TestMethod]
        public void testGradients()
        {
            var g = tf.Graph().as_default();
            var inp = tf.constant(1.0, shape: new[] { 32, 100 }, name: "in");
            var w = tf.constant(1.0, shape: new[] { 100, 10 }, name: "w");
            var b = tf.constant(1.0, shape: new[] { 10 }, name: "b");
            var xw = math_ops.matmul(inp, w, name: "xw");
            var h = nn_ops.bias_add(xw, b, name: "h");
            var w_grad = gradients_impl.gradients(new[] { h }, new[] { w })[0];
            self.assertEquals("MatMul", w_grad.op.type);
            // TODO: Operation._original_op
            //self.assertEquals(w_grad.op._original_op, xw.op);
            self.assertTrue((bool)w_grad.op.get_attr("transpose_a"));
            self.assertFalse((bool)w_grad.op.get_attr("transpose_b"));
        }

        [TestMethod]
        public void testBatchMatMulGradient()
        {
            var a = tf.constant(np.array(Enumerable.Range(1, 18).Select(elem => (float)elem).ToArray()), shape: new[] { 2, 3, 3 });
            var b = tf.divide(a, tf.constant(2.0f));
            var c = tf.batch_matmul(a, b);
            var g = tf.gradients(c, new[] { a, b }, stop_gradients: new[] { a, b });

            // Expected gradients: the first 18 values are dc/da, the last 18 are dc/db.
            var checkG = new[]
            {
                // dc/da
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                // dc/db
                12.0f, 12.0f, 12.0f,
                15.0f, 15.0f, 15.0f,
                18.0f, 18.0f, 18.0f,
                39.0f, 39.0f, 39.0f,
                42.0f, 42.0f, 42.0f,
                45.0f, 45.0f, 45.0f
            };
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var resultList = result[0].GetData<float>().ToList();
                resultList.AddRange(result[1].GetData<float>());
                CollectionAssert.AreEqual(checkG, resultList.ToArray());
            }
        }

        [TestMethod]
        public void testSimpleGradients()
        {
            // Evaluates f(x) and df/dx at a scalar point in a fresh session.
            (T, T) evaluateDerivatives<T>(Func<Tensor, Tensor> f, T xval) where T : unmanaged
            {
                var x = tf.constant(xval);
                var y = f(x);
                var g = tf.gradients(y, x);
                using (var session = tf.Session())
                {
                    var result = session.run(new[] { y, g[0] });
                    return (result[0].GetData<T>()[0], result[1].GetData<T>()[0]);
                }
            }

            // Compares the TF value and derivative against a closed-form
            // reference, in both float64 and float32 precision.
            void test(string name, Func<Tensor, Tensor> tfF, Func<double, (double, double)> targetF, double[] values)
            {
                foreach (var x in values)
                {
                    var (expectedY, expectedDY) = targetF(x);
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, x);
                        self.assertFloat64Equal(expectedY, actualY, $"value {name}/float64 at {x}");
                        self.assertFloat64Equal(expectedDY, actualDY, $"derivative {name}/float64 at {x}");
                    }
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, (float)x);
                        self.assertFloat32Equal((float)expectedY, actualY, $"value {name}/float32 at {x}");
                        self.assertFloat32Equal((float)expectedDY, actualDY, $"derivative {name}/float32 at {x}");
                    }
                }
            }

            test("tf.exp",
                x => tf.exp(5 * x),
                x => (Math.Exp(5.0 * x), 5.0 * Math.Exp(5.0 * x)),
                new[] { -1.0, 0.0, 1.0, 1.5 });

            test("tf.log",
                x => tf.log(x),
                x => (Math.Log(x), 1.0 / x),
                new[] { 0.5, 1.0, 1.5, 2.0 });

            test("tf.sqrt",
                x => tf.sqrt(x),
                x => (Math.Sqrt(x), 0.5 / Math.Sqrt(x)),
                new[] { 0.5, 1.0, 1.1, 1.5, 2.0 });

            test("tf.sin",
                x => tf.sin(x),
                x => (Math.Sin(x), Math.Cos(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });

            test("tf.sinh",
                x => tf.sinh(x),
                x => (Math.Sinh(x), Math.Cosh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });

            test("tf.cos",
                x => tf.cos(x),
                x => (Math.Cos(x), -Math.Sin(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });

            test("tf.cosh",
                x => tf.cosh(x),
                x => (Math.Cosh(x), Math.Sinh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });

            test("tf.tanh",
                x => tf.tanh(x),
                x => (Math.Tanh(x), 1.0 - Math.Pow(Math.Tanh(x), 2.0)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });

            test("tf.maximum",
                x => tf.maximum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Max(x, 0.0), (x > 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });

            test("tf.minimum",
                x => tf.minimum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Min(x, 0.0), (x < 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
        }

        [TestMethod]
        public void testTanhGradient()
        {
            var a = tf.constant(1f);
            var b = tf.tanh(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                // d tanh(x)/dx = 1 - tanh(x)^2, approximately 0.4199743 at x = 1.
                self.assertEquals(0.41997434127f, actual);
            }
        }

        [TestMethod]
        public void testLgammaGrad()
        {
            var a = tf.constant(5f);
            var b = tf.lgamma(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                // lgamma(5) = ln(4!) = ln(24); its derivative is digamma(5).
                self.assertEquals(1.5061177f, actualDeriv);
                self.assertEquals(3.17805386f, actual);
            }
        }

        [TestMethod]
        public void testSliceGrad()
        {
            var a = tf.tanh(tf.constant(new[] { 2f, 3f }, shape: new[] { 2, 1 }));
            var b = tf.strided_slice(a,
                tf.constant(new[] { 0 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }));
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = np.squeeze(result[0]);
                var actual = np.squeeze(result[1]);
                self.assertEquals(new float[] { 1, 0 }, new float[] { actualDeriv[0], actualDeriv[1] });
                self.assertEquals(0.9640276f, (float)actual);
            }
        }

        [TestMethod]
        public void testConcatGrad()
        {
            var a1 = tf.constant(new[] { 2f }, shape: new[] { 1 });
            var a2 = tf.constant(new[] { 3f }, shape: new[] { 1 });
            var a = tf.concat(new List<Tensor>(new[] { a1, a2 }), 0);
            var g = tf.gradients(a, a1);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, a });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                self.assertEquals(1f, actualDeriv);
                self.assertEquals(2f, actual);
            }
        }

        [TestMethod]
        public void testStopGradientFunction()
        {
            var ap = tf.constant(1f);
            var b = tf.tanh(ap) + gen_array_ops.stop_gradient(ap);
            var g = tf.gradients(b, ap);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                // The stop_gradient branch contributes nothing, so the
                // gradient is just d tanh(x)/dx at x = 1.
                self.assertEquals(0.41997434127f, actual);
            }
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnusedOutput()
        {
            //def testUnusedOutput(self):
            //  with ops.Graph().as_default():
            //    w = constant(1.0, shape=[2, 2])
            //    x = constant(1.0, shape=[2, 2])
            //    wx = math_ops.matmul(w, x)
            //    split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
            //    c = math_ops.reduce_sum(split_wx[1])
            //    gw = gradients.gradients(c, [w])[0]
            //    self.assertEquals("MatMul", gw.op.type)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradients()
        {
            //def testColocateGradients(self):
            //  with ops.Graph().as_default() as g:
            //    w = constant(1.0, shape=[1, 1])
            //    x = constant(1.0, shape=[1, 2])
            //    with g.device("/device:GPU:0"):
            //      wx = math_ops.matmul(w, x)
            //    gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregation()
        {
            //def testColocateGradientsWithAggregation(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    wx = math_ops.matmul(w, x)
            //    wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregationInMultipleDevices()
        {
            //def testColocateGradientsWithAggregationInMultipleDevices(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    with g.device("/task:1"):
            //      wx = math_ops.matmul(w, x)
            //    with g.device("/task:2"):
            //      wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithGateGradients()
        {
            //def testColocateGradientsWithGateGradients(self):
            //  if not test_util.is_gpu_available():
            //    self.skipTest("No GPU available")
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:CPU:0"):
            //      x = constant(1.0, shape=[1, 1])
            //      y = constant(1.0, shape=[1, 1])
            //      s = x + y
            //    with g.device("/device:GPU:0"):
            //      z = math_ops.reduce_sum(s)
            //    gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
            //                               gate_gradients=True)[0]
            //    with session.Session():
            //      # Make sure the placer doesn't complain.
            //      self.evaluate(gz_x)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryStop()
        {
            //def testBoundaryStop(self):
            //  # Test that we don't differentiate 'x'. The gradient function for 'x' is
            //  # set explicitly to None so we will get an exception if the gradient code
            //  # tries to differentiate 'x'.
            //  with ops.Graph().as_default():
            //    c = constant(1.0)
            //    x = array_ops.identity(c)
            //    y = x + 1.0
            //    z = y + 1
            //    grads = gradients.gradients(z, [x])
            //    self.assertTrue(all(x is not None for x in grads))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryContinue()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testBoundaryContinue(self):
            //  # Test that we differentiate both 'x' and 'y' correctly when x is a
            //  # predecessor of y.
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y * 3.0
            //    grads = gradients.gradients(z, [x, y])
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(6.0, grads[0].eval())
        }
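
        // A possible TF.NET translation of the commented test above (an
        // untested sketch; it reuses only APIs already exercised in this
        // file: tf.constant, tf.gradients over multiple sources, Session.run):
        //
        //     var x = tf.constant(1.0f);
        //     var y = x * 2.0f;
        //     var z = y * 3.0f;
        //     var grads = tf.gradients(z, new[] { x, y });
        //     using (var sess = tf.Session())
        //     {
        //         float dzdx = sess.run(grads[0]);  // dz/dx = 2 * 3 = 6
        //         float dzdy = sess.run(grads[1]);  // dz/dy = 3
        //         self.assertEquals(6.0f, dzdx);
        //         self.assertEquals(3.0f, dzdy);
        //     }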

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAccumulateN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAccumulateN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.
            //        EXPERIMENTAL_ACCUMULATE_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAddN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAddN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodTree()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodTree(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNoGradientForStringOutputs()
        {
            //def testNoGradientForStringOutputs(self):
            //  with ops.Graph().as_default():
            //    def _TestOpGrad(_, float_grad, string_grad):
            //      """Gradient function for TestStringOutput."""
            //      self.assertEquals(float_grad.dtype, dtypes.float32)
            //      self.assertFalse(string_grad)
            //      return float_grad
            //    ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
            //    c = constant(1.0)
            //    x, _ = test_ops.test_string_output(c)
            //    z = x * 2.0
            //    w = z * 3.0
            //    grads = gradients.gradients(z, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
            //    grads = gradients.gradients(w, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testSingletonIndexedSlices()
        {
            //def testSingletonIndexedSlices(self):
            //  with ops.Graph().as_default():
            //    x = array_ops.placeholder(dtypes.float32)
            //    y = array_ops.identity(x)
            //    dy = ops.IndexedSlices(
            //        array_ops.placeholder(dtypes.float32),
            //        array_ops.placeholder(dtypes.int32))
            //    dx, = gradients.gradients(y, x, grad_ys=dy)
            //    # The IndexedSlices gradient of tf.identity is the identity map.
            //    with self.cached_session() as sess:
            //      vdx, vdy = sess.run(
            //          [dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
            //    self.assertEqual(vdx, vdy)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNonDifferentiableSwitchInWhileLoop()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testNonDifferentiableSwitchInWhileLoop(self):
            //  with ops.Graph().as_default():
            //    v = array_ops.placeholder(dtypes.float32, [])
            //    def _Step(i, a, ta):
            //      a += math_ops.cast(v, dtypes.int32)
            //      return (i + 1, a, ta.write(i, a))
            //    n = 4
            //    i, _, ta = control_flow_ops.while_loop(
            //        lambda i, *_: i < n,
            //        _Step, [0, 0, tensor_array_ops.TensorArray(
            //            dtypes.int32, size=n)])
            //    target = ta.read(i - 1)
            //    grad, = gradients.gradients(target, v)
            //    self.assertIsNone(grad)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableReadValueGradient()
        {
            //def testVariableReadValueGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(var.read_value(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableAsGraphElementGradient()
        {
            //def testVariableAsGraphElementGradient(self):
            //  with ops.Graph().as_default() as graph:
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(graph.as_graph_element(var), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableRefGradient()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testVariableRefGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.VariableV1(init)
            //    gradient = gradients.gradients(var._ref(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testDependentYs()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testDependentYs(self):
            //  with self.cached_session():
            //    x = constant_op.constant(3.0)
            //    y = math_ops.square(x)
            //    y1 = math_ops.square(y)
            //    y2 = math_ops.square(y1)
            //    g = gradients.gradients([y, y2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    g = gradients.gradients(y + y2, x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    z = array_ops.identity(y)
            //    z2 = array_ops.identity(y2)
            //    g = gradients.gradients([z, z2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
        }
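
        // A possible TF.NET sketch of the dependent-ys case above (untested;
        // assumes tf.square exists and that gradients_impl.gradients
        // aggregates multiple ys as in testGradients). With x = 3, y = x^2
        // and y2 = x^8, the summed gradient is 2x + 8x^7 = 6 + 17496 = 17502.
        //
        //     var x = tf.constant(3.0f);
        //     var y = tf.square(x);
        //     var y2 = tf.square(tf.square(y));
        //     var g = gradients_impl.gradients(new[] { y, y2 }, new[] { x });
        //     using (var sess = tf.Session())
        //     {
        //         float result = sess.run(g[0]);
        //         self.assertEquals(17502.0f, result);
        //     }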

        [Ignore("TODO")]
        [TestMethod]
        public void testPartialDerivatives()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testPartialDerivatives(self):
            //  with self.cached_session():
            //    x = constant_op.constant(1.)
            //    y = 2 * x
            //    z = x + y
            //    totalg = gradients.gradients(z, [x, y])
            //    self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
            //    partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
            //    self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
        }
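
        // A possible TF.NET sketch of the partial-derivatives case above
        // (untested; reuses the stop_gradients parameter exercised in
        // testBatchMatMulGradient). With z = x + y and y = 2x, the total
        // derivative dz/dx is 3, but it is 1 when y is held constant:
        //
        //     var x = tf.constant(1.0f);
        //     var y = 2 * x;
        //     var z = x + y;
        //     var totalg = tf.gradients(z, new[] { x, y });
        //     var partialg = tf.gradients(z, new[] { x, y }, stop_gradients: new[] { x, y });
        //     using (var sess = tf.Session())
        //     {
        //         float total = sess.run(totalg[0]);     // dz/dx = 3
        //         float partial = sess.run(partialg[0]); // y treated as constant: 1
        //         self.assertEquals(3.0f, total);
        //         self.assertEquals(1.0f, partial);
        //     }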

        [Ignore("TODO")]
        [TestMethod]
        public void testStopGradients()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testStopGradients(self):
            //  def _MakeGraph(rng, stop_gradients=()):
            //    def _FunctionOf(xs, k=3):
            //      return ops.convert_to_tensor(
            //          sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
            //          + rng.rand(k, k))
            //    a = _FunctionOf([])
            //    if "a" in stop_gradients: a = array_ops.stop_gradient(a)
            //    b = _FunctionOf([a])
            //    if "b" in stop_gradients: b = array_ops.stop_gradient(b)
            //    c = _FunctionOf([a, b])
            //    if "c" in stop_gradients: c = array_ops.stop_gradient(c)
            //    d = _FunctionOf([b, c])
            //    if "d" in stop_gradients: d = array_ops.stop_gradient(d)
            //    return dict(a=a, b=b, c=c, d=d)
            //  def _Gradients(ys, xs, **kwargs):
            //    dydxs = gradients.gradients(ys, xs, **kwargs)
            //    dydxs = [0. * x if dydx is None else dydx
            //             for x, dydx in zip(xs, dydxs)]
            //    return dydxs
            //  seed = np.random.randint(1000)
            //  cases = []
            //  subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
            //  graph = _MakeGraph(np.random.RandomState(seed))
            //  for constants in subsets:
            //    graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
            //    for variables_ in subsets:
            //      # compute the gradient when stopped using tf.stop_gradients
            //      grad1 = _Gradients([graph_with_stops["d"]],
            //                         [graph_with_stops[v] for v in variables_])
            //      # compute the gradient when stopped using the stop_gradients kwarg
            //      grad2 = _Gradients([graph["d"]],
            //                         [graph[v] for v in variables_],
            //                         stop_gradients=[graph[v] for v in constants])
            //      cases.append(dict(grad1=grad1, grad2=grad2,
            //                        constants=constants, variables=variables_))
            //  # evaluate all tensors in one call to session.run for speed
            //  with self.cached_session() as sess:
            //    results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
            //  for (npgrad1, npgrad2), case in zip(results, cases):
            //    for a, b in zip(npgrad1, npgrad2):
            //      np.testing.assert_allclose(a, b)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsNoneUnconnectedGradients()
        {
            //def testUnconnectedGradientsNoneUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="none")
            //    self.assertIsNone(grad[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZerosUnconnectedGradients()
        {
            //def testUnconnectedGradientsZerosUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grads = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZeroConnectedGradients()
        {
            //def testUnconnectedGradientsZeroConnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = x * 3.0
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertEquals(3.0, self.evaluate(grad)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnknownUnconnectedGradientsValueGiven()
        {
            //def testUnknownUnconnectedGradientsValueGiven(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = constant(1.0)
            //    with self.assertRaisesRegexp(
            //        ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
            //      gradients.gradients([y], [x], unconnected_gradients="nonsense")
        }
    }
}