
GradientsTest.cs

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using NumSharp;
using Tensorflow;
using static Tensorflow.Binding;

namespace TensorFlowNET.UnitTest.gradients_test
{
    [TestClass]
    public class GradientsTest : PythonTest
    {
        [Ignore("TODO")]
        [TestMethod]
        public void testGradients()
        {
            var g = tf.Graph().as_default();
            var inp = tf.constant(1.0, shape: new[] { 32, 100 }, name: "in");
            var w = tf.constant(1.0, shape: new[] { 100, 10 }, name: "w");
            var b = tf.constant(1.0, shape: new[] { 10 }, name: "b");
            var xw = math_ops.matmul(inp, w, name: "xw");
            var h = nn_ops.bias_add(xw, b, name: "h");
            var w_grad = gradients_impl.gradients(new[] { h }, new[] { w })[0];
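            // For h = inp · w, dh/dw is inpᵀ · upstream_grad, so the backward op
            // should be a MatMul with transpose_a set and transpose_b unset.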
            self.assertEquals("MatMul", w_grad.op.type);
            // TODO: Operation._original_op
            //self.assertEquals(w_grad.op._original_op, xw.op);
            self.assertTrue((bool)w_grad.op.get_attr("transpose_a"));
            self.assertFalse((bool)w_grad.op.get_attr("transpose_b"));
        }
        [TestMethod]
        public void testBatchMatMulGradient()
        {
            var a = tf.constant(np.array(Enumerable.Range(1, 18).Select(elem => (float)elem).ToArray()), shape: new[] { 2, 3, 3 });
            var b = tf.divide(a, tf.constant(2.0f));
            var c = tf.batch_matmul(a, b);
            var g = tf.gradients(c, new[] { a, b }, stop_gradients: new[] { a, b });
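            // With both inputs treated as independent (stop_gradients), the expected
            // values are dc/da = ones · bᵀ and dc/db = aᵀ · ones, evaluated per batch:
            // checkG holds the flattened dc/da (first two blocks) followed by dc/db.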
            var checkG = new[]
            {
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                3.0f, 7.5f, 12.0f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                16.5f, 21.0f, 25.5f,
                12.0f, 12.0f, 12.0f,
                15.0f, 15.0f, 15.0f,
                18.0f, 18.0f, 18.0f,
                39.0f, 39.0f, 39.0f,
                42.0f, 42.0f, 42.0f,
                45.0f, 45.0f, 45.0f
            };
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var resultList = result[0].GetData<float>().ToList();
                resultList.AddRange(result[1].GetData<float>());
                Console.WriteLine(result.ToString());
                // CollectionAssert.AreEqual takes the expected collection first.
                CollectionAssert.AreEqual(checkG, resultList.ToArray());
            }
        }
        [TestMethod]
        public void testTanhGradient()
        {
            var a = tf.constant(1f);
            var b = tf.tanh(a);
            var g = tf.gradients(b, a);
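            // d/dx tanh(x) = 1 - tanh(x)^2; at x = 1 this is 1 - 0.76159^2 ≈ 0.41997434.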
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                self.assertEquals(0.41997434127f, actual);
            }
        }
        [TestMethod]
        public void testLgammaGrad()
        {
            var a = tf.constant(5f);
            var b = tf.lgamma(a);
            var g = tf.gradients(b, a);
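            // d/dx lgamma(x) is the digamma function: digamma(5) = 1 + 1/2 + 1/3 + 1/4 - γ ≈ 1.5061177,
            // and lgamma(5) = ln(4!) = ln(24) ≈ 3.1780539.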
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                self.assertEquals(1.5061177f, actualDeriv);
                self.assertEquals(3.17805386f, actual);
            }
        }
        [TestMethod]
        public void testSliceGrad()
        {
            var a = tf.tanh(tf.constant(new[] { 2f, 3f }, shape: new[] { 2, 1 }));
            var b = tf.strided_slice(a,
                tf.constant(new[] { 0 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 }),
                tf.constant(new[] { 1 }, tf.int32, new[] { 1 })
            );
            var g = tf.gradients(b, a);
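            // The slice keeps only a[0], so the upstream gradient flows to the first
            // element alone ([1, 0]), and b itself evaluates to tanh(2) ≈ 0.9640276.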
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, b });
                var actualDeriv = np.squeeze(result[0]);
                var actual = np.squeeze(result[1]);
                self.assertEquals(new float[] { 1, 0 }, new float[] { actualDeriv[0], actualDeriv[1] });
                self.assertEquals(0.9640276f, (float)actual);
            }
        }
        [TestMethod]
        public void testConcatGrad()
        {
            var a1 = tf.constant(new[] { 2f }, shape: new[] { 1 });
            var a2 = tf.constant(new[] { 3f }, shape: new[] { 1 });
            var a = tf.concat(new List<Tensor>(new[] { a1, a2 }), 0);
            var g = tf.gradients(a, a1);
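            // concat routes the upstream gradient through to each input unchanged,
            // so da/da1 = 1.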
            using (var sess = tf.Session())
            {
                var result = sess.run(new object[] { g, a });
                var actualDeriv = result[0].GetData<float>()[0];
                var actual = result[1].GetData<float>()[0];
                self.assertEquals(1f, actualDeriv);
                self.assertEquals(2f, actual);
            }
        }
        [TestMethod]
        public void testStopGradientFunction()
        {
            var ap = tf.constant(1f);
            var b = tf.tanh(ap) + gen_array_ops.stop_gradient(ap);
            var g = tf.gradients(b, ap);
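            // stop_gradient blocks the second term, so db/dap reduces to
            // d tanh(ap)/dap ≈ 0.41997434 at ap = 1.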
            using (var sess = tf.Session())
            {
                var result = sess.run(g);
                var actual = result[0].GetData<float>()[0];
                self.assertEquals(0.41997434127f, actual);
            }
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testUnusedOutput()
        {
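            // The commented Python in this and the following ignored tests is the
            // upstream TensorFlow test body, kept here as a reference for porting.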
            //def testUnusedOutput(self):
            //  with ops.Graph().as_default():
            //    w = constant(1.0, shape=[2, 2])
            //    x = constant(1.0, shape=[2, 2])
            //    wx = math_ops.matmul(w, x)
            //    split_wx = array_ops.split(value=wx, num_or_size_splits=2, axis=0)
            //    c = math_ops.reduce_sum(split_wx[1])
            //    gw = gradients.gradients(c, [w])[0]
            //    self.assertEquals("MatMul", gw.op.type)
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradients()
        {
            //def testColocateGradients(self):
            //  with ops.Graph().as_default() as g:
            //    w = constant(1.0, shape=[1, 1])
            //    x = constant(1.0, shape=[1, 2])
            //    with g.device("/device:GPU:0"):
            //      wx = math_ops.matmul(w, x)
            //    gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregation()
        {
            //def testColocateGradientsWithAggregation(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    wx = math_ops.matmul(w, x)
            //    wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithAggregationInMultipleDevices()
        {
            //def testColocateGradientsWithAggregationInMultipleDevices(self):
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:GPU:1"):
            //      w = constant(1.0, shape=[1, 1])
            //      x = constant(1.0, shape=[1, 2])
            //      y = constant(1.0, shape=[1, 2])
            //    with g.device("/task:1"):
            //      wx = math_ops.matmul(w, x)
            //    with g.device("/task:2"):
            //      wy = math_ops.matmul(w, y)
            //    with g.device("/device:GPU:0"):
            //      z = wx + wy
            //    gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
            //    self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
            //    gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
            //    self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testColocateGradientsWithGateGradients()
        {
            //def testColocateGradientsWithGateGradients(self):
            //  if not test_util.is_gpu_available():
            //    self.skipTest("No GPU available")
            //  with ops.Graph().as_default() as g:
            //    with g.device("/device:CPU:0"):
            //      x = constant(1.0, shape=[1, 1])
            //      y = constant(1.0, shape=[1, 1])
            //      s = x + y
            //    with g.device("/device:GPU:0"):
            //      z = math_ops.reduce_sum(s)
            //    gz_x = gradients.gradients(z, [x], colocate_gradients_with_ops=True,
            //                               gate_gradients=True)[0]
            //    with session.Session():
            //      # Make sure the placer doesn't complain.
            //      self.evaluate(gz_x)
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryStop()
        {
            //def testBoundaryStop(self):
            //  # Test that we don't differentiate 'x'. The gradient function for 'x' is
            //  # set explicitly to None so we will get an exception if the gradient code
            //  # tries to differentiate 'x'.
            //  with ops.Graph().as_default():
            //    c = constant(1.0)
            //    x = array_ops.identity(c)
            //    y = x + 1.0
            //    z = y + 1
            //    grads = gradients.gradients(z, [x])
            //    self.assertTrue(all(x is not None for x in grads))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testBoundaryContinue()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testBoundaryContinue(self):
            //  # Test that we differentiate both 'x' and 'y' correctly when x is a
            //  # predecessor of y.
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y * 3.0
            //    grads = gradients.gradients(z, [x, y])
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(6.0, grads[0].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAccumulateN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAccumulateN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_ACCUMULATE_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodAddN()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodAddN(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y], aggregation_method=gradients.AggregationMethod.ADD_N)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testAggregationMethodTree()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testAggregationMethodTree(self):
            //  with self.cached_session():
            //    x = constant(1.0)
            //    y = x * 2.0
            //    z = y + y + y + y + y + y + y + y + y + y
            //    grads = gradients.gradients(
            //        z, [x, y],
            //        aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
            //    self.assertTrue(all(x is not None for x in grads))
            //    self.assertEqual(20.0, grads[0].eval())
            //    self.assertEqual(10.0, grads[1].eval())
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testNoGradientForStringOutputs()
        {
            //def testNoGradientForStringOutputs(self):
            //  with ops.Graph().as_default():
            //    def _TestOpGrad(_, float_grad, string_grad):
            //      """Gradient function for TestStringOutput."""
            //      self.assertEquals(float_grad.dtype, dtypes.float32)
            //      self.assertFalse(string_grad)
            //      return float_grad
            //    ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
            //    c = constant(1.0)
            //    x, _ = test_ops.test_string_output(c)
            //    z = x * 2.0
            //    w = z * 3.0
            //    grads = gradients.gradients(z, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
            //    grads = gradients.gradients(w, [c])
            //    self.assertTrue(isinstance(grads[0], ops.Tensor))
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testSingletonIndexedSlices()
        {
            //def testSingletonIndexedSlices(self):
            //  with ops.Graph().as_default():
            //    x = array_ops.placeholder(dtypes.float32)
            //    y = array_ops.identity(x)
            //    dy = ops.IndexedSlices(
            //        array_ops.placeholder(dtypes.float32),
            //        array_ops.placeholder(dtypes.int32))
            //    dx, = gradients.gradients(y, x, grad_ys=dy)
            //    # The IndexedSlices gradient of tf.identity is the identity map.
            //    with self.cached_session() as sess:
            //      vdx, vdy = sess.run(
            //          [dx, dy], feed_dict={x: [1.0], dy.indices: [0], dy.values: [2.0]})
            //      self.assertEqual(vdx, vdy)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testNonDifferentiableSwitchInWhileLoop()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testNonDifferentiableSwitchInWhileLoop(self):
            //  with ops.Graph().as_default():
            //    v = array_ops.placeholder(dtypes.float32, [])
            //    def _Step(i, a, ta):
            //      a += math_ops.cast(v, dtypes.int32)
            //      return (i + 1, a, ta.write(i, a))
            //    n = 4
            //    i, _, ta = control_flow_ops.while_loop(
            //        lambda i, *_: i < n,
            //        _Step, [0, 0, tensor_array_ops.TensorArray(dtypes.int32, size=n)])
            //    target = ta.read(i - 1)
            //    grad, = gradients.gradients(target, v)
            //    self.assertIsNone(grad)
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testVariableReadValueGradient()
        {
            //def testVariableReadValueGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(var.read_value(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableAsGraphElementGradient()
        {
            //def testVariableAsGraphElementGradient(self):
            //  with ops.Graph().as_default() as graph:
            //    init = constant_op.constant(100.0)
            //    var = variables.Variable(init)
            //    gradient = gradients.gradients(graph.as_graph_element(var), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testVariableRefGradient()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testVariableRefGradient(self):
            //  with ops.Graph().as_default():
            //    init = constant_op.constant(100.0)
            //    var = variables.VariableV1(init)
            //    gradient = gradients.gradients(var._ref(), var)
            //    self.assertIsNotNone(gradient)
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testDependentYs()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testDependentYs(self):
            //  with self.cached_session():
            //    x = constant_op.constant(3.0)
            //    y = math_ops.square(x)
            //    y1 = math_ops.square(y)
            //    y2 = math_ops.square(y1)
            //    g = gradients.gradients([y, y2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    g = gradients.gradients(y + y2, x)
            //    self.assertAllClose(17502.0, g[0].eval())
            //    z = array_ops.identity(y)
            //    z2 = array_ops.identity(y2)
            //    g = gradients.gradients([z, z2], x)
            //    self.assertAllClose(17502.0, g[0].eval())
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testPartialDerivatives()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testPartialDerivatives(self):
            //  with self.cached_session():
            //    x = constant_op.constant(1.)
            //    y = 2 * x
            //    z = x + y
            //    totalg = gradients.gradients(z, [x, y])
            //    self.assertEqual([3.0, 1.0], [g.eval() for g in totalg])
            //    partialg = gradients.gradients(z, [x, y], stop_gradients=[x, y])
            //    self.assertEqual([1.0, 1.0], [g.eval() for g in partialg])
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testStopGradients()
        {
            //@test_util.run_v1_only("b/120545219")
            //def testStopGradients(self):
            //  def _MakeGraph(rng, stop_gradients=()):
            //    def _FunctionOf(xs, k=3):
            //      return ops.convert_to_tensor(
            //          sum(math_ops.matmul(rng.rand(k, k), x) for x in xs)
            //          + rng.rand(k, k))
            //    a = _FunctionOf([])
            //    if "a" in stop_gradients: a = array_ops.stop_gradient(a)
            //    b = _FunctionOf([a])
            //    if "b" in stop_gradients: b = array_ops.stop_gradient(b)
            //    c = _FunctionOf([a, b])
            //    if "c" in stop_gradients: c = array_ops.stop_gradient(c)
            //    d = _FunctionOf([b, c])
            //    if "d" in stop_gradients: d = array_ops.stop_gradient(d)
            //    return dict(a=a, b=b, c=c, d=d)
            //  def _Gradients(ys, xs, **kwargs):
            //    dydxs = gradients.gradients(ys, xs, **kwargs)
            //    dydxs = [0. * x if dydx is None else dydx
            //             for x, dydx in zip(xs, dydxs)]
            //    return dydxs
            //  seed = np.random.randint(1000)
            //  cases = []
            //  subsets = [""] + "a b c d ab ac ad bc bd cd abc abd acd bcd abcd".split()
            //  graph = _MakeGraph(np.random.RandomState(seed))
            //  for constants in subsets:
            //    graph_with_stops = _MakeGraph(np.random.RandomState(seed), constants)
            //    for variables_ in subsets:
            //      # compute the gradient when stopped using tf.stop_gradients
            //      grad1 = _Gradients([graph_with_stops["d"]],
            //                         [graph_with_stops[v] for v in variables_])
            //      # compute the gradient when stopped using the stop_gradients kwarg
            //      grad2 = _Gradients([graph["d"]],
            //                         [graph[v] for v in variables_],
            //                         stop_gradients=[graph[v] for v in constants])
            //      cases.append(dict(grad1=grad1, grad2=grad2,
            //                        constants=constants, variables=variables_))
            //  # evaluate all tensors in one call to session.run for speed
            //  with self.cached_session() as sess:
            //    results = sess.run([(case["grad1"], case["grad2"]) for case in cases])
            //    for (npgrad1, npgrad2), case in zip(results, cases):
            //      for a, b in zip(npgrad1, npgrad2):
            //        np.testing.assert_allclose(a, b)
        }
        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsNoneUnconnectedGradients()
        {
            //def testUnconnectedGradientsNoneUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="none")
            //    self.assertIsNone(grad[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZerosUnconnectedGradients()
        {
            //def testUnconnectedGradientsZerosUnconnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0, shape=[2, 2])
            //    y = constant(3.0, shape=[3, 1])
            //    grads = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertAllEqual([[0.0, 0.0], [0.0, 0.0]], self.evaluate(grads)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnconnectedGradientsZeroConnectedGradients()
        {
            //def testUnconnectedGradientsZeroConnectedGradients(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = x * 3.0
            //    grad = gradients.gradients(
            //        [y], [x], unconnected_gradients="zero")
            //    with self.cached_session() as sess:
            //      self.assertEquals(3.0, self.evaluate(grad)[0])
        }

        [Ignore("TODO")]
        [TestMethod]
        public void testUnknownUnconnectedGradientsValueGiven()
        {
            //def testUnknownUnconnectedGradientsValueGiven(self):
            //  with ops.Graph().as_default():
            //    x = constant(1.0)
            //    y = constant(1.0)
            //    with self.assertRaisesRegexp(
            //        ValueError, "Unknown value for unconnected_gradients: 'nonsense'"):
            //      gradients.gradients([y], [x], unconnected_gradients="nonsense")
        }
    }
}