From b0cba0a60658e770a2cc563d924fd450d9a6902c Mon Sep 17 00:00:00 2001
From: Lee Reid
Date: Mon, 22 Mar 2021 14:27:07 +0100
Subject: [PATCH] Added gradients for acos, asin, atan

---
 src/TensorFlowNET.Core/Gradients/math_grad.cs | 51 +++++++++++++++++++
 1 file changed, 51 insertions(+)

diff --git a/src/TensorFlowNET.Core/Gradients/math_grad.cs b/src/TensorFlowNET.Core/Gradients/math_grad.cs
index a071d234..19490445 100644
--- a/src/TensorFlowNET.Core/Gradients/math_grad.cs
+++ b/src/TensorFlowNET.Core/Gradients/math_grad.cs
@@ -634,6 +634,23 @@ namespace Tensorflow.Gradients
             });
         }
 
+        [RegisterGradient("Asin")]
+        public static Tensor[] _ASinGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                x = math_ops.conj(x);
+                // the derivative of
+                // y = asin(x)
+                // is
+                // d/dx asin(x) = 1 / sqrt(1-x*x)
+                return new Tensor[] { math_ops.multiply(grad, 1 / gen_math_ops.sqrt(1 - gen_math_ops.square(x))) };
+            });
+        }
+
         [RegisterGradient("Sin")]
         public static Tensor[] _SinGrad(Operation op, Tensor[] grads)
         {
@@ -660,6 +677,23 @@ namespace Tensorflow.Gradients
             });
         }
 
+        [RegisterGradient("Acos")]
+        public static Tensor[] _ACosGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                // the derivative of
+                // y = acos(x)
+                // is
+                // d/dx acos(x) = -1 / sqrt(1-x*x) = -d/dx asin(x)
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, -1 / gen_math_ops.sqrt(1 - gen_math_ops.square(x))) };
+            });
+        }
+
         [RegisterGradient("Cos")]
         public static Tensor[] _CosGrad(Operation op, Tensor[] grads)
         {
@@ -686,6 +720,23 @@ namespace Tensorflow.Gradients
             });
         }
 
+        [RegisterGradient("Atan")]
+        public static Tensor[] _ATanGrad(Operation op, Tensor[] grads)
+        {
+            var grad = grads[0];
+            var x = op.inputs[0];
+
+            return tf_with(ops.control_dependencies(grads), delegate
+            {
+                // the derivative of
+                // y = atan(x)
+                // is
+                // d/dx atan(x) = 1 / (1 + x*x)
+                x = math_ops.conj(x);
+                return new Tensor[] { math_ops.multiply(grad, 1 / (1 + gen_math_ops.square(x))) };
+            });
+        }
+
         [RegisterGradient("Tanh")]
         public static Tensor[] _TanhGrad(Operation op, Tensor[] grads)
         {
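
A standalone sanity check (not part of the patch) for the three analytic derivatives used in the gradient comments above: d/dx asin(x) = 1/sqrt(1-x*x), d/dx acos(x) = -1/sqrt(1-x*x), d/dx atan(x) = 1/(1+x*x). This is a minimal sketch using only System.Math and a central finite difference, so it does not depend on the TensorFlow.NET API; the class and method names are illustrative only.

using System;

class InverseTrigGradCheck
{
    // Central finite difference approximation of f'(x).
    static double Central(Func<double, double> f, double x, double h = 1e-6)
        => (f(x + h) - f(x - h)) / (2 * h);

    static void Main()
    {
        double x = 0.5;

        // Each numeric estimate should agree with the analytic formula
        // used in the corresponding gradient to several decimal places.
        Console.WriteLine($"asin: numeric {Central(Math.Asin, x):F6}  analytic {1.0 / Math.Sqrt(1 - x * x):F6}");
        Console.WriteLine($"acos: numeric {Central(Math.Acos, x):F6}  analytic {-1.0 / Math.Sqrt(1 - x * x):F6}");
        Console.WriteLine($"atan: numeric {Central(Math.Atan, x):F6}  analytic {1.0 / (1 + x * x):F6}");
    }
}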