Browse Source

!148 Fix error of Mech Updater

From: @pkuliuliu
Reviewed-by: @jxlang910,@zhidanliu
Signed-off-by: @jxlang910
tags/v1.1.0
mindspore-ci-bot Gitee 4 years ago
parent
commit
f2acae7bc3
2 changed files with 16 additions and 1 deletion
  1. +15
    -0
      mindarmour/privacy/diff_privacy/mechanisms/mechanisms.py
  2. +1
    -1
      mindarmour/privacy/diff_privacy/train/model.py

+ 15
- 0
mindarmour/privacy/diff_privacy/mechanisms/mechanisms.py View File

@@ -284,6 +284,21 @@ class NoiseAdaGaussianRandom(NoiseGaussianRandom):
"get {}".format(decay_policy))
self._decay_policy = decay_policy

def construct(self, gradients):
    """
    Generate adaptive Gaussian noise with the same shape as the given gradients.

    The noise standard deviation is the product of the norm bound and the
    current (adaptively decayed) noise multiplier.

    Args:
        gradients (Tensor): The gradients.

    Returns:
        Tensor, generated noise with shape like given gradients.
    """
    grad_shape = P.Shape()(gradients)
    noise_stddev = P.Mul()(self._norm_bound, self._noise_multiplier)
    return normal(grad_shape, self._mean, noise_stddev, self._seed)


class _MechanismsParamsUpdater(Cell):
"""


+ 1
- 1
mindarmour/privacy/diff_privacy/train/model.py View File

@@ -515,7 +515,7 @@ class _TrainOneStepWithLossScaleCell(Cell):
if self._noise_mech is not None:
grad_noise_tuple = ()
for grad_item in grads:
grad_noise = self._mech(grad_item)
grad_noise = self._noise_mech(grad_item)
grad_noise_tuple = grad_noise_tuple + (grad_noise,)
grads = self._tuple_add(grads, grad_noise_tuple)
grads = self._hyper_map(F.partial(_grad_scale, self._micro_float),


Loading…
Cancel
Save