
GradientTest.cs

using Microsoft.VisualStudio.TestTools.UnitTesting;
using Tensorflow.NumPy;
using System;
using System.Collections.Generic;
using System.Linq;
using Tensorflow;
using static Tensorflow.Binding;

namespace TensorFlowNET.UnitTest.Gradient
{
    [TestClass]
    public class GradientTest : GraphModeTestBase
    {
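        // broadcast_to copies the scalar x across a (2, 4, 3) tensor, so the
        // gradient flowing back to x is the element count: 2 * 4 * 3 = 24.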
        [TestMethod]
        public void BroadcastToGrad()
        {
            // Build the ops on a dedicated graph so the session below can run them.
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var grad = tf.gradients(y, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                Assert.AreEqual(result, 24.0f);
            }
        }
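
        // cumsum along axis 1: the element at index j feeds 4 - j prefix sums,
        // so each (i, k) column contributes 4 + 3 + 2 + 1 = 10, and the 2 * 3
        // columns give a total gradient of 60.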
        [TestMethod]
        public void CumsumGrad()
        {
            // Build the ops on a dedicated graph so the session below can run them.
            var graph = tf.Graph().as_default();

            var x = tf.constant(2, dtype: dtypes.float32);
            var y = tf.broadcast_to(x, (2, 4, 3));
            var z = tf.cumsum(y, axis: 1);
            var grad = tf.gradients(z, x);

            using (var sess = tf.Session(graph))
            {
                float result = sess.run(grad[0]);
                Assert.AreEqual(result, 60.0f);
            }
        }
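
        // For h = xw + b, dL/dw is computed as MatMul(x, grad, transpose_a: true),
        // which is what the transpose_a / transpose_b attribute checks verify.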
        [TestMethod, Ignore]
        public void testGradients()
        {
            var inp = tf.constant(1.0, shape: new[] { 32, 100 }, name: "in");
            var w = tf.constant(1.0, shape: new[] { 100, 10 }, name: "w");
            var b = tf.Variable(1.0, shape: new[] { 10 }, name: "b");
            var xw = math_ops.matmul(inp, w, name: "xw");
            var h = nn_ops.bias_add(xw, b, name: "h");
            var w_grad = gradients_impl.gradients(new[] { h }, new[] { w })[0];
            self.assertEquals("MatMul", w_grad.op.type);
            // TODO: Operation._original_op
            //self.assertEquals(w_grad.op._original_op, xw.op);
            self.assertTrue((bool)w_grad.op.get_attr("transpose_a"));
            self.assertFalse((bool)w_grad.op.get_attr("transpose_b"));
        }
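
        // For c = a matmul b with an all-ones upstream gradient, dc/da = 1 * b^T
        // (each row of dc/da holds the row sums of b) and dc/db = a^T * 1 (each
        // row of dc/db repeats a column sum of a); checkG lists both, batch by batch.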
        [TestMethod]
        public void testBatchMatMulGradient()
        {
            var a = tf.constant(np.array(Enumerable.Range(1, 18).Select(elem => (float)elem).ToArray()), shape: new[] { 2, 3, 3 });
            var b = tf.divide(a, tf.constant(2.0f));
            var c = tf.batch_matmul(a, b);
            var g = tf.gradients(c, new[] { a, b }, stop_gradients: new[] { a, b });
            var checkG = new[]
            {
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                12.0f, 12.0f, 12.0f,
                15.0f, 15.0f, 15.0f,
                18.0f, 18.0f, 18.0f,
                39.0f, 39.0f, 39.0f,
                42.0f, 42.0f, 42.0f,
                45.0f, 45.0f, 45.0f
            };
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var resultList = result[0].ToArray<float>().ToList();
                resultList.AddRange(result[1].ToArray<float>());
                Console.WriteLine(result.ToString());
                CollectionAssert.AreEqual(resultList.ToArray(), checkG);
            }
        }
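
        // Checks y = f(x) and dy/dx against closed-form references, running each
        // case in both float64 and float32 to exercise both gradient kernels.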
        [TestMethod]
        public void testSimpleGradients()
        {
            (T, T) evaluateDerivatives<T>(Func<Tensor, Tensor> f, T xval) where T : unmanaged
            {
                var x = tf.constant(xval);
                var y = f(x);
                var g = tf.gradients(y, x);
                using (var session = tf.Session())
                {
                    var result = session.run(new[] { y, g[0] });
                    return (result[0].ToArray<T>()[0], result[1].ToArray<T>()[0]);
                }
            }

            void test(string name, Func<Tensor, Tensor> tfF, Func<double, (double, double)> targetF, double[] values)
            {
                foreach (var x in values)
                {
                    var (expectedY, expectedDY) = targetF(x);
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, x);
                        self.assertFloat64Equal(expectedY, actualY, $"value {name}/float64 at {x}");
                        self.assertFloat64Equal(expectedDY, actualDY, $"derivative {name}/float64 at {x}");
                    }
                    {
                        var (actualY, actualDY) = evaluateDerivatives(tfF, (float)x);
                        self.assertFloat32Equal((float)expectedY, actualY, $"value {name}/float32 at {x}");
                        self.assertFloat32Equal((float)expectedDY, actualDY, $"derivative {name}/float32 at {x}");
                    }
                }
            }

            test("tf.exp",
                x => tf.exp(5 * x),
                x => (Math.Exp(5.0 * x), 5.0 * Math.Exp(5.0 * x)),
                new[] { -1.0, 0.0, 1.0, 1.5 });
            test("tf.log",
                x => tf.log(x),
                x => (Math.Log(x), 1.0 / x),
                new[] { 0.5, 1.0, 1.5, 2.0 });
            test("tf.sqrt",
                x => tf.sqrt(x),
                x => (Math.Sqrt(x), 0.5 / Math.Sqrt(x)),
                new[] { 0.5, 1.0, 1.1, 1.5, 2.0 });
            test("tf.sin",
                x => tf.sin(x),
                x => (Math.Sin(x), Math.Cos(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.sinh",
                x => tf.sinh(x),
                x => (Math.Sinh(x), Math.Cosh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cos",
                x => tf.cos(x),
                x => (Math.Cos(x), -Math.Sin(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.cosh",
                x => tf.cosh(x),
                x => (Math.Cosh(x), Math.Sinh(x)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.tanh",
                x => tf.tanh(x),
                x => (Math.Tanh(x), 1.0 - Math.Pow(Math.Tanh(x), 2.0)),
                new[] { -1.0, 0.0, 1.0, 1.5, 2.0 });
            test("tf.maximum",
                x => tf.maximum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Max(x, 0.0), (x > 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
            test("tf.minimum",
                x => tf.minimum(x, tf.constant(0.0, dtype: x.dtype)),
                x => (Math.Min(x, 0.0), (x < 0.0) ? 1.0 : 0.0),
                new[] { -1.0, 1.0 });
        }
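
        // d/dx tanh(x) = 1 - tanh^2(x); at x = 1 this is 1 - 0.76159^2 = 0.41997434.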
        [TestMethod]
        public void testTanhGradient()
        {
            var a = tf.constant(1f);
            var b = tf.tanh(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0];
                Assert.AreEqual(actual, 0.41997434127f);
            }
        }
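
        // d/dx lgamma(x) is the digamma function: digamma(5) = 1.5061177, and
        // lgamma(5) = ln(4!) = 3.17805386.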
        [TestMethod]
        public void testLgammaGrad()
        {
            var a = tf.constant(5f);
            var b = tf.lgamma(a);
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = result[0];
                var actual = result[1];
                Assert.AreEqual(actualDeriv, 1.5061177f);
                Assert.AreEqual(actual, 3.17805386f);
            }
        }
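
        // The strided slice keeps only the first of the two elements, so the
        // gradient scatters 1 into that position and 0 elsewhere; the sliced
        // value itself is tanh(2) = 0.9640276.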
        [TestMethod]
        public void testSliceGrad()
        {
            var a = tf.tanh(tf.constant(new[] { 2f, 3f }, shape: new[] { 2, 1 }));
            var b = tf.strided_slice(a,
                tf.constant(new[] { 0 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 })
            );
            var g = tf.gradients(b, a);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = np.squeeze(result[0]);
                var actual = np.squeeze(result[1]);
                Assert.AreEqual(actualDeriv, new float[] { 1, 0 });
                Assert.AreEqual(actual, 0.9640276f);
            }
        }
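
        // The gradient of concat routes each slice of the upstream gradient back
        // to the input it came from, so da/da1 is identically 1.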
        [TestMethod]
        public void testConcatGrad()
        {
            var a1 = tf.constant(new[] { 2f }, shape: new[] { 1 });
            var a2 = tf.constant(new[] { 3f }, shape: new[] { 1 });
            var a = tf.concat(new List<Tensor>(new[] { a1, a2 }), 0);
            var g = tf.gradients(a, a1);
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, a });
                var actualDeriv = result[0][0];
                var actual = result[1][0];
                Assert.AreEqual(actualDeriv, 1f);
                Assert.AreEqual(actual, 2f);
            }
        }
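
        // stop_gradient blocks the second term of b, so only the tanh path
        // contributes and the result matches testTanhGradient above.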
        [TestMethod]
        public void testStopGradientFunction()
        {
            var ap = tf.constant(1f);
            var b = tf.tanh(ap) + gen_array_ops.stop_gradient(ap);
            var g = tf.gradients(b, ap);
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0];
                Assert.AreEqual(actual, 0.41997434127f);
            }
        }
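
        // The remaining tests are unported TODO stubs; the original TensorFlow
        // Python test bodies are kept below as reference for the port.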
        [Ignore("TODO")]
        [TestMethod]
        public void testUnusedOutput()
        {
            //def testUnusedOutput(self):
            //  with ops.Graph().as_default():
            //    w = constant(1.0, shape=[2, 2])
            //    x = constant(1.0, shape=[2, 2])
            //    wx = math_ops.matmul(w, x)
            //    split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
            //    c = math_ops.reduce_sum(split_wx[1])
            //    gw = gradients.gradients(c, [w])[0]
            //    self.assertEquals("MatMul", gw.op.type)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradients()
        {
            //def testColocateGradients(self):
            //  with ops.Graph().as_default() as g:
            //    w = constant(1.0, shape=[1, 1])
            //    x = constant(1.0, shape=[1, 2])
            //    with g.device("/device:GPU:0"):
            //      wx = math_ops.matmul(w, x)
            //    gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregation()
        {
            //def testColocateGradientsWithAggregation(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //      wx = math_ops.matmul(w, x)
            //      wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregationInMultipleDevices()
        {
            //def testColocateGradientsWithAggregationInMultipleDevices(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    with g.device("/task:1"):
            //      wx = math_ops.matmul(w, x)
            //    with g.device("/task:2"):
            //      wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithGateGradients()
        {
            //def testColocateGradientsWithGateGradients(self):
            //  if not test_util.is_gpu_available():
            //    self.skipTest("No GPU available")
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:CPU:0"):
            //      x = constant(1.0, shape=[1, 1])
            //      y = constant(1.0, shape=[1, 1])
            //      s = x + y
            //    with g.device("/device:GPU:0"):
            //      z = math_ops.reduce_sum(s)
            //    gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
            //                               gate_gradients=True)[0]
            //    with session.Session():
            //      # Make sure the placer doesn't complain.
            //      self.evaluate(gz_x)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryStop()
        {
            //def testBoundaryStop(self):
            //  # Test that we don't differentiate 'x'. The gradient function for 'x' is
            //  # set explicitly to None so we will get an exception if the gradient code
            //  # tries to differentiate 'x'.
            //  with ops.Graph().as_default():
            //    c = constant(1.0)
            //    x = array_ops.identity(c)
            //    y = x + 1.0
            //    z = y + 1
            //    grads = gradients.gradients(z, [x])
            //    self.assertTrue(all(x is not None for x in grads))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryContinue()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testBoundaryContinue(self):
            //  # Test that we differentiate both 'x' and 'y' correctly when x is a
            //  # predecessor of y.
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y * 3.0
            //    grads = gradients.gradients(z, [x, y])
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(6.0, grads[0].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAccumulateN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAccumulateN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.
            //        EXPERIMENTAL_ACCUMULATE_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAddN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAddN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodTree()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodTree(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNoGradientForStringOutputs()
        {
            //def testNoGradientForStringOutputs(self):
            //  with ops.Graph().as_default():
            //    def _TestOpGrad(_, float_grad, string_grad):
            //      """Gradient function for TestStringOutput."""
            //      self.assertEquals(float_grad.dtype, dtypes.float32)
            //      self.assertFalse(string_grad)
            //      return float_grad
            //    ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
            //    c = constant(1.0)
            //    x, _ = test_ops.test_string_output(c)
            //    z = x * 2.0
            //    w = z * 3.0
            //    grads = gradients.gradients(z, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
            //    grads = gradients.gradients(w, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testSingletonIndexedSlices()
        {
            //def testSingletonIndexedSlices(self):
            //  with ops.Graph().as_default():
            //    x = array_ops.placeholder(dtypes.float32)
            //    y = array_ops.identity(x)
            //    dy = ops.IndexedSlices(
            //        array_ops.placeholder(dtypes.float32),
            //        array_ops.placeholder(dtypes.int32))
            //    dx, = gradients.gradients(y, x, grad_ys=dy)
            //    # The IndexedSlices gradient of tf.identity is the identity map.
            //    with self.cached_session() as sess:
            //      vdx, vdy = sess.run(
            //          [dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
            //      self.assertEqual(vdx, vdy)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNonDifferentiableSwitchInWhileLoop()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testNonDifferentiableSwitchInWhileLoop(self):
            //  with ops.Graph().as_default():
            //    v = array_ops.placeholder(dtypes.float32, [])
            //    def _Step(i, a, ta):
            //      a += math_ops.cast(v, dtypes.int32)
            //      return (i + 1, a, ta.write(i, a))
            //    n = 4
            //    i, _, ta = control_flow_ops.while_loop(
            //        lambda i, *_: i < n,
            //        _Step, [0, 0, tensor_array_ops.TensorArray(
            //            dtypes.int32, size=n)])
            //    target = ta.read(i - 1)
            //    grad, = gradients.gradients(target, v)
            //    self.assertIsNone(grad)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableReadValueGradient()
        {
            //def testVariableReadValueGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(var.read_value(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableAsGraphElementGradient()
        {
            //def testVariableAsGraphElementGradient(self):
            //  with ops.Graph().as_default() as graph:
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(graph.as_graph_element(var), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableRefGradient()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testVariableRefGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.VariableV1(init)
            //    gradient = gradients.gradients(var._ref(), var)
            //    self.assertIsNotNone(gradient)
        }
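
        // With x = 3: y = x^2 and y2 = x^8, so d(y + y2)/dx = 2x + 8x^7
        // = 6 + 17496 = 17502, the value the ported assertions expect.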
        [Ignore("TODO")]
        [TestMethod]
        public void testDependentYs()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testDependentYs(self):
            //  with self.cached_session():
            //    x = constant_op.constant(3.0)
            //    y = math_ops.square(x)
            //    y1 = math_ops.square(y)
            //    y2 = math_ops.square(y1)
            //    g = gradients.gradients([y, y2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    g = gradients.gradients(y + y2, x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    z = array_ops.identity(y)
            //    z2 = array_ops.identity(y2)
            //    g = gradients.gradients([z, z2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testPartialDerivatives()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testPartialDerivatives(self):
            //  with self.cached_session():
            //    x = constant_op.constant(1.)
            //    y = 2 * x
            //    z = x + y
            //    totalg = gradients.gradients(z, [x, y])
            //    self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
            //    partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
            //    self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testStopGradients()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testStopGradients(self):
            //  def _MakeGraph(rng, stop_gradients=()):
            //    def _FunctionOf(xs, k=3):
            //      return ops.convert_to_tensor(
            //          sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
            //          + rng.rand(k, k))
            //    a = _FunctionOf([])
            //    if "a" in stop_gradients: a = array_ops.stop_gradient(a)
            //    b = _FunctionOf([a])
            //    if "b" in stop_gradients: b = array_ops.stop_gradient(b)
            //    c = _FunctionOf([a, b])
            //    if "c" in stop_gradients: c = array_ops.stop_gradient(c)
            //    d = _FunctionOf([b, c])
            //    if "d" in stop_gradients: d = array_ops.stop_gradient(d)
            //    return dict(a=a, b=b, c=c, d=d)
            //  def _Gradients(ys, xs, **kwargs):
            //    dydxs = gradients.gradients(ys, xs, **kwargs)
            //    dydxs = [0. * x if dydx is None else dydx
            //             for x, dydx in zip(xs, dydxs)]
            //    return dydxs
            //  seed = np.random.randint(1000)
            //  cases = []
            //  subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
            //  graph = _MakeGraph(np.random.RandomState(seed))
            //  for constants in subsets:
            //    graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
            //    for variables_ in subsets:
            //      # compute the gradient when stopped using tf.stop_gradients
            //      grad1 = _Gradients([graph_with_stops["d"]],
            //                         [graph_with_stops[v] for v in variables_])
            //      # compute the gradient when stopped using the stop_gradients kwarg
            //      grad2 = _Gradients([graph["d"]],
            //                         [graph[v] for v in variables_],
            //                         stop_gradients=[graph[v] for v in constants])
            //      cases.append(dict(grad1=grad1, grad2=grad2,
            //                        constants=constants, variables=variables_))
            //  # evaluate all tensors in one call to session.run for speed
            //  with self.cached_session() as sess:
            //    results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
            //  for (npgrad1, npgrad2), case in zip(results, cases):
            //    for a, b in zip(npgrad1, npgrad2):
            //      np.testing.assert_allclose(a, b)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsNoneUnconnectedGradients()
        {
            //def testUnconnectedGradientsNoneUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="none")
            //    self.assertIsNone(grad[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZerosUnconnectedGradients()
        {
            //def testUnconnectedGradientsZerosUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grads = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZeroConnectedGradients()
        {
            //def testUnconnectedGradientsZeroConnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = x * 3.0
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertEquals(3.0, self.evaluate(grad)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnknownUnconnectedGradientsValueGiven()
        {
            //def testUnknownUnconnectedGradientsValueGiven(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = constant(1.0)
            //    with self.assertRaisesRegexp(
            //        ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
            //      gradients.gradients([y], [x], unconnected_gradients="nonsense")
        }
    }
}