
fix pylint warnings.

tags/v0.3.0-alpha
zheng-huanhuan xue 5 years ago
commit febaf7a253
46 changed files with 225 additions and 280 deletions
  1. +4 -5 example/mnist_demo/lenet5_mnist_coverage.py
  2. +9 -8 example/mnist_demo/lenet5_mnist_fuzzing.py
  3. +4 -5 example/mnist_demo/mnist_attack_cw.py
  4. +4 -5 example/mnist_demo/mnist_attack_deepfool.py
  5. +4 -6 example/mnist_demo/mnist_attack_fgsm.py
  6. +7 -8 example/mnist_demo/mnist_attack_genetic.py
  7. +9 -10 example/mnist_demo/mnist_attack_hsja.py
  8. +6 -7 example/mnist_demo/mnist_attack_jsma.py
  9. +6 -7 example/mnist_demo/mnist_attack_lbfgs.py
  10. +6 -7 example/mnist_demo/mnist_attack_mdi2fgsm.py
  11. +6 -7 example/mnist_demo/mnist_attack_nes.py
  12. +4 -6 example/mnist_demo/mnist_attack_pgd.py
  13. +7 -8 example/mnist_demo/mnist_attack_pointwise.py
  14. +5 -6 example/mnist_demo/mnist_attack_pso.py
  15. +7 -8 example/mnist_demo/mnist_attack_salt_and_pepper.py
  16. +2 -5 example/mnist_demo/mnist_defense_nad.py
  17. +8 -8 example/mnist_demo/mnist_evaluation.py
  18. +6 -7 example/mnist_demo/mnist_similarity_detector.py
  19. +4 -6 example/mnist_demo/mnist_train.py
  20. +2 -4 mindarmour/attacks/gradient_method.py
  21. +1 -2 mindarmour/attacks/iterative_gradient_method.py
  22. +4 -5 mindarmour/attacks/lbfgs.py
  23. +15 -14 mindarmour/fuzzing/fuzzing.py
  24. +3 -6 mindarmour/utils/util.py
  25. +3 -7 tests/st/resnet50/resnet_cifar10.py
  26. +0 -1 tests/st/resnet50/test_cifar10_attack_fgsm.py
  27. +4 -6 tests/ut/python/attacks/black/test_genetic_attack.py
  28. +7 -7 tests/ut/python/attacks/black/test_hsja.py
  29. +5 -5 tests/ut/python/attacks/black/test_nes.py
  30. +5 -6 tests/ut/python/attacks/black/test_pointwise_attack.py
  31. +0 -4 tests/ut/python/attacks/test_gradient_method.py
  32. +6 -8 tests/ut/python/attacks/test_iterative_gradient_method.py
  33. +3 -3 tests/ut/python/attacks/test_lbfgs.py
  34. +3 -5 tests/ut/python/defenses/mock_net.py
  35. +4 -5 tests/ut/python/defenses/test_ad.py
  36. +5 -6 tests/ut/python/defenses/test_ead.py
  37. +4 -5 tests/ut/python/defenses/test_nad.py
  38. +4 -5 tests/ut/python/defenses/test_pad.py
  39. +0 -1 tests/ut/python/detectors/black/test_similarity_detector.py
  40. +0 -3 tests/ut/python/detectors/test_spatial_smoothing.py
  41. +8 -8 tests/ut/python/evaluations/black/test_black_defense_eval.py
  42. +5 -5 tests/ut/python/evaluations/test_attack_eval.py
  43. +1 -2 tests/ut/python/evaluations/test_radar_metric.py
  44. +1 -1 tests/ut/python/fuzzing/test_coverage_metrics.py
  45. +16 -17 tests/ut/python/fuzzing/test_fuzzing.py
  46. +8 -10 tests/ut/python/utils/test_image_transform.py

+4 -5 example/mnist_demo/lenet5_mnist_coverage.py

@@ -12,18 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import numpy as np

import numpy as np
from mindspore import Model
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.nn import SoftmaxCrossEntropyWithLogits
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from lenet5_net import LeNet5
from mindarmour.attacks.gradient_method import FastGradientSignMethod
from mindarmour.utils.logger import LogUtil
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

sys.path.append("..")
from data_processing import generate_mnist_dataset
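The import hunks in this commit converge on pylint's wrong-import-order / ungrouped-imports convention: standard library first, then third-party packages, then local modules, each group alphabetized and separated by a blank line. A minimal sketch of that grouping, using modules that appear in this hunk (not necessarily the file's exact final import block):

```python
# Standard-library imports come first.
import sys

# Third-party imports next, alphabetized within the group.
import numpy as np
from mindspore import Model
from mindspore import context
from mindspore.nn import SoftmaxCrossEntropyWithLogits
from mindspore.train.serialization import load_checkpoint, load_param_into_net

# Local / first-party imports last.
from lenet5_net import LeNet5
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.utils.logger import LogUtil
```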


+9 -8 example/mnist_demo/lenet5_mnist_fuzzing.py

@@ -12,18 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import numpy as np

import numpy as np
from mindspore import Model
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.nn import SoftmaxCrossEntropyWithLogits

from mindarmour.attacks.gradient_method import FastGradientSignMethod
from mindarmour.utils.logger import LogUtil
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.fuzzing.fuzzing import Fuzzing
from lenet5_net import LeNet5
from mindarmour.fuzzing.fuzzing import Fuzzing
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.utils.logger import LogUtil

sys.path.append("..")
from data_processing import generate_mnist_dataset
@@ -81,8 +79,11 @@ def test_lenet_mnist_fuzzing():

model_fuzz_test = Fuzzing(initial_seeds, model, train_images, 20)
failed_tests = model_fuzz_test.fuzzing()
model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
if failed_tests:
model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
else:
LOGGER.info(TAG, 'Fuzzing test identifies none failed test')


if __name__ == '__main__':
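The new guard only computes coverage when the fuzzer actually produced failed tests. It also reflects the preference, seen again in later hunks (`if gts:`, `if not mutate_tests:`), for testing a sequence's truthiness instead of comparing its length to zero, which pylint flags as len-as-condition. A toy sketch with an illustrative value:

```python
failed_tests = []               # illustrative value

if len(failed_tests) > 0:       # what pylint's len-as-condition check flags
    print('found failed tests')

if failed_tests:                # preferred: an empty sequence is falsy
    print('found %d failed tests' % len(failed_tests))
else:
    print('no failed tests were found')
```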


+4 -5 example/mnist_demo/mnist_attack_cw.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.carlini_wagner import CarliniWagnerL2Attack
from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+4 -5 example/mnist_demo/mnist_attack_deepfool.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.deep_fool import DeepFool
from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+4 -6 example/mnist_demo/mnist_attack_fgsm.py

@@ -13,21 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.gradient_method import FastGradientSignMethod

from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+7 -8 example/mnist_demo/mnist_attack_genetic.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from mindarmour.attacks.black.genetic_attack import GeneticAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.genetic_attack import GeneticAttack
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -97,8 +96,8 @@ def test_genetic_attack_on_mnist():
per_bounds=0.1, step_size=0.25, temp=0.1,
sparse=True)
targeted_labels = np.random.randint(0, 10, size=len(true_labels))
for i in range(len(true_labels)):
if targeted_labels[i] == true_labels[i]:
for i, true_l in enumerate(true_labels):
if targeted_labels[i] == true_l:
targeted_labels[i] = (targeted_labels[i] + 1) % 10
start_time = time.clock()
success_list, adv_data, query_list = attack.generate(
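The loop rewrite above, repeated in several attack examples below, addresses pylint's consider-using-enumerate check: iterate over index and element together rather than indexing with range(len(...)). A standalone sketch with made-up labels:

```python
import numpy as np

true_labels = np.array([3, 7, 7, 1])                               # made-up data
targeted_labels = np.random.randint(0, 10, size=len(true_labels))

# enumerate yields (index, element), so no range(len(...)) indexing is needed.
for i, true_l in enumerate(true_labels):
    if targeted_labels[i] == true_l:
        targeted_labels[i] = (targeted_labels[i] + 1) % 10

print(targeted_labels)
```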


+9 -10 example/mnist_demo/mnist_attack_hsja.py

@@ -12,18 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

import numpy as np
import pytest

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
from mindarmour.utils.logger import LogUtil
from lenet5_net import LeNet5

sys.path.append("..")
from data_processing import generate_mnist_dataset
@@ -64,9 +63,9 @@ def random_target_labels(true_labels):
def create_target_images(dataset, data_labels, target_labels):
res = []
for label in target_labels:
for i in range(len(data_labels)):
if data_labels[i] == label:
res.append(dataset[i])
for data_label, data in zip(data_labels, dataset):
if data_label == label:
res.append(data)
break
return np.array(res)

@@ -126,9 +125,9 @@ def test_hsja_mnist_attack():
target_images = create_target_images(test_images, predict_labels,
target_labels)
attack.set_target_images(target_images)
success_list, adv_data, query_list = attack.generate(test_images, target_labels)
success_list, adv_data, _ = attack.generate(test_images, target_labels)
else:
success_list, adv_data, query_list = attack.generate(test_images, None)
success_list, adv_data, _ = attack.generate(test_images, None)

adv_datas = []
gts = []
@@ -136,7 +135,7 @@ def test_hsja_mnist_attack():
if success:
adv_datas.append(adv)
gts.append(gt)
if len(gts) > 0:
if gts:
adv_datas = np.concatenate(np.asarray(adv_datas), axis=0)
gts = np.asarray(gts)
pred_logits_adv = model.predict(adv_datas)
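The create_target_images rewrite pairs labels and samples with zip() instead of walking indices. Restated as a self-contained snippet with illustrative data:

```python
import numpy as np


def create_target_images(dataset, data_labels, target_labels):
    """Keep, for each target label, the first sample that carries it."""
    res = []
    for label in target_labels:
        # zip() walks labels and samples together; no range(len(...)) indexing.
        for data_label, data in zip(data_labels, dataset):
            if data_label == label:
                res.append(data)
                break
    return np.array(res)


samples = np.arange(8).reshape(4, 2)      # four 2-element samples (illustrative)
labels = np.array([0, 1, 1, 2])
print(create_target_images(samples, labels, np.array([1, 2])))
```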


+6 -7 example/mnist_demo/mnist_attack_jsma.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.jsma import JSMAAttack
from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -79,8 +78,8 @@ def test_jsma_attack():
predict_labels = np.concatenate(predict_labels)
true_labels = np.concatenate(test_labels)
targeted_labels = np.random.randint(0, 10, size=len(true_labels))
for i in range(len(true_labels)):
if targeted_labels[i] == true_labels[i]:
for i, true_l in enumerate(true_labels):
if targeted_labels[i] == true_l:
targeted_labels[i] = (targeted_labels[i] + 1) % 10
accuracy = np.mean(np.equal(predict_labels, true_labels))
LOGGER.info(TAG, "prediction accuracy before attacking is : %g", accuracy)


+6 -7 example/mnist_demo/mnist_attack_lbfgs.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.lbfgs import LBFGS
from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -85,8 +84,8 @@ def test_lbfgs_attack():
is_targeted = True
if is_targeted:
targeted_labels = np.random.randint(0, 10, size=len(true_labels)).astype(np.int32)
for i in range(len(true_labels)):
if targeted_labels[i] == true_labels[i]:
for i, true_l in enumerate(true_labels):
if targeted_labels[i] == true_l:
targeted_labels[i] = (targeted_labels[i] + 1) % 10
else:
targeted_labels = true_labels.astype(np.int32)


+6 -7 example/mnist_demo/mnist_attack_mdi2fgsm.py

@@ -13,21 +13,20 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.iterative_gradient_method import MomentumDiverseInputIterativeMethod

from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.iterative_gradient_method import \
MomentumDiverseInputIterativeMethod
from mindarmour.evaluations.attack_evaluation import AttackEvaluate
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+6 -7 example/mnist_demo/mnist_attack_nes.py

@@ -12,18 +12,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

import numpy as np
import pytest

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.black.natural_evolutionary_strategy import NES
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.natural_evolutionary_strategy import NES
from mindarmour.utils.logger import LogUtil
from lenet5_net import LeNet5

sys.path.append("..")
from data_processing import generate_mnist_dataset
@@ -73,9 +72,9 @@ def _pseudorandom_target(index, total_indices, true_class):
def create_target_images(dataset, data_labels, target_labels):
res = []
for label in target_labels:
for i in range(len(data_labels)):
if data_labels[i] == label:
res.append(dataset[i])
for data_label, data in zip(data_labels, dataset):
if data_label == label:
res.append(data)
break
return np.array(res)



+4 -6 example/mnist_demo/mnist_attack_pgd.py

@@ -13,21 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from lenet5_net import LeNet5
from mindarmour.attacks.iterative_gradient_method import ProjectedGradientDescent

from mindarmour.utils.logger import LogUtil
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+7 -8 example/mnist_demo/mnist_attack_pointwise.py

@@ -12,20 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -99,8 +98,8 @@ def test_pointwise_attack_on_mnist():
attack = PointWiseAttack(model=model, is_targeted=is_target)
if is_target:
targeted_labels = np.random.randint(0, 10, size=len(true_labels))
for i in range(len(true_labels)):
if targeted_labels[i] == true_labels[i]:
for i, true_l in enumerate(true_labels):
if targeted_labels[i] == true_l:
targeted_labels[i] = (targeted_labels[i] + 1) % 10
else:
targeted_labels = true_labels


+5 -6 example/mnist_demo/mnist_attack_pso.py

@@ -13,20 +13,19 @@
# limitations under the License.
import sys
import time

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from mindarmour.attacks.black.pso_attack import PSOAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.pso_attack import PSOAttack
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+7 -8 example/mnist_demo/mnist_attack_salt_and_pepper.py

@@ -12,20 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from mindarmour.attacks.black.salt_and_pepper_attack import SaltAndPepperNoiseAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.salt_and_pepper_attack import SaltAndPepperNoiseAttack
from mindarmour.evaluations.attack_evaluation import AttackEvaluate

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -102,8 +101,8 @@ def test_salt_and_pepper_attack_on_mnist():
sparse=True)
if is_target:
targeted_labels = np.random.randint(0, 10, size=len(true_labels))
for i in range(len(true_labels)):
if targeted_labels[i] == true_labels[i]:
for i, true_l in enumerate(true_labels):
if targeted_labels[i] == true_l:
targeted_labels[i] = (targeted_labels[i] + 1) % 10
else:
targeted_labels = true_labels


+2 -5 example/mnist_demo/mnist_defense_nad.py

@@ -12,25 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""defense example using nad"""
import sys

import logging
import sys

import numpy as np
import pytest

from mindspore import Tensor
from mindspore import context
from mindspore import nn
from mindspore.nn import SoftmaxCrossEntropyWithLogits
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from lenet5_net import LeNet5
from mindarmour.attacks import FastGradientSignMethod
from mindarmour.defenses import NaturalAdversarialDefense
from mindarmour.utils.logger import LogUtil

from lenet5_net import LeNet5

sys.path.append("..")
from data_processing import generate_mnist_dataset



+8 -8 example/mnist_demo/mnist_evaluation.py

@@ -12,30 +12,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.
"""evaluate example"""
import sys
import os
import sys
import time
import numpy as np
from scipy.special import softmax
from lenet5_net import LeNet5
import numpy as np
from mindspore import Model
from mindspore import Tensor
from mindspore import context
from mindspore import nn
from mindspore.nn import Cell
from mindspore.ops.operations import TensorAdd
from mindspore.nn import SoftmaxCrossEntropyWithLogits
from mindspore.ops.operations import TensorAdd
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax
from lenet5_net import LeNet5
from mindarmour.attacks import FastGradientSignMethod
from mindarmour.attacks import GeneticAttack
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.defenses import NaturalAdversarialDefense
from mindarmour.detectors.black.similarity_detector import SimilarityDetector
from mindarmour.evaluations import BlackDefenseEvaluate
from mindarmour.evaluations import DefenseEvaluate
from mindarmour.utils.logger import LogUtil
from mindarmour.detectors.black.similarity_detector import SimilarityDetector
sys.path.append("..")
from data_processing import generate_mnist_dataset
@@ -237,7 +237,7 @@ def test_black_defense():
# gen black-box adversarial examples of test data
for idx in range(attacked_size):
raw_st = time.time()
raw_sl, raw_a, raw_qc = attack_rm.generate(
_, raw_a, raw_qc = attack_rm.generate(
np.expand_dims(attacked_sample[idx], axis=0),
np.expand_dims(attack_target_label[idx], axis=0))
raw_t = time.time() - raw_st
@@ -271,7 +271,7 @@ def test_black_defense():
sparse=False)
for idx in range(attacked_size):
def_st = time.time()
def_sl, def_a, def_qc = attack_dm.generate(
_, def_a, def_qc = attack_dm.generate(
np.expand_dims(attacked_sample[idx], axis=0),
np.expand_dims(attack_target_label[idx], axis=0))
def_t = time.time() - def_st
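The two generate() calls above unpack the unused success flag into `_`, which silences pylint's unused-variable warning without changing behaviour. A toy stand-in for the attack API, assuming nothing beyond what the hunk shows:

```python
import time


def generate(sample, label):
    """Illustrative stand-in returning (success_flag, adv_sample, query_count)."""
    return True, sample, 1


raw_st = time.time()
# Only the adversarial sample and the query count are used later, so the
# success flag goes into `_` instead of a named variable that is never read.
_, raw_a, raw_qc = generate([0.1, 0.2], 3)
raw_t = time.time() - raw_st
print(raw_a, raw_qc, raw_t)
```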


+6 -7 example/mnist_demo/mnist_similarity_detector.py

@@ -12,23 +12,22 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

import numpy as np
import pytest
from scipy.special import softmax

from mindspore import Model
from mindspore import context
from mindspore import Tensor
from mindspore import context
from mindspore.nn import Cell
from mindspore.ops.operations import TensorAdd
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from scipy.special import softmax

from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.pso_attack import PSOAttack
from lenet5_net import LeNet5
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.pso_attack import PSOAttack
from mindarmour.detectors.black.similarity_detector import SimilarityDetector

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")



+4 -6 example/mnist_demo/mnist_train.py

@@ -11,20 +11,18 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import os
import sys

import mindspore.nn as nn
from mindspore import context, Tensor
from mindspore import context
from mindspore.nn.metrics import Accuracy
from mindspore.train import Model
from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindspore.train import Model
from mindspore.nn.metrics import Accuracy

from mindarmour.utils.logger import LogUtil

from lenet5_net import LeNet5
from mindarmour.utils.logger import LogUtil

sys.path.append("..")
from data_processing import generate_mnist_dataset


+2 -4 mindarmour/attacks/gradient_method.py

@@ -183,8 +183,7 @@ class FastGradientMethod(GradientMethod):
>>> grad = self._gradient([[0.2, 0.3, 0.4]],
>>> [[0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
"""
sens = Tensor(np.array([1.0], self._dtype))
out_grad = self._grad_all(Tensor(inputs), Tensor(labels), sens)
out_grad = self._grad_all(Tensor(inputs), Tensor(labels))
if isinstance(out_grad, tuple):
out_grad = out_grad[0]
gradient = out_grad.asnumpy()
@@ -286,8 +285,7 @@ class FastGradientSignMethod(GradientMethod):
>>> grad = self._gradient([[0.2, 0.3, 0.4]],
>>> [[0, 1, 0, 0, 0, 0, 0, 0, 0, 0])
"""
sens = Tensor(np.array([1.0], self._dtype))
out_grad = self._grad_all(Tensor(inputs), Tensor(labels), sens)
out_grad = self._grad_all(Tensor(inputs), Tensor(labels))
if isinstance(out_grad, tuple):
out_grad = out_grad[0]
gradient = out_grad.asnumpy()


+1 -2 mindarmour/attacks/iterative_gradient_method.py

@@ -351,9 +351,8 @@ class MomentumIterativeMethod(IterativeGradientMethod):
>>> grad = self._gradient([[0.5, 0.3, 0.4]],
>>> [[0, 0, 0, 1, 0, 0, 0, 0, 0, 0])
"""
sens = Tensor(np.array([1.0], inputs.dtype))
# get grad of loss over x
out_grad = self._loss_grad(Tensor(inputs), Tensor(labels), sens)
out_grad = self._loss_grad(Tensor(inputs), Tensor(labels))
if isinstance(out_grad, tuple):
out_grad = out_grad[0]
gradient = out_grad.asnumpy()


+4 -5 mindarmour/attacks/lbfgs.py

@@ -115,12 +115,11 @@ class LBFGS(Attack):
def _gradient(self, cur_input, labels, shape):
""" Return model gradient to minimize loss in l-bfgs-b."""
label_dtype = labels.dtype
sens = Tensor(np.array([1], self._dtype))
labels = np.expand_dims(labels, axis=0).astype(label_dtype)
# input shape should like original shape
reshape_input = np.expand_dims(cur_input.reshape(shape),
axis=0)
out_grad = self._grad_all(Tensor(reshape_input), Tensor(labels), sens)
out_grad = self._grad_all(Tensor(reshape_input), Tensor(labels))
if isinstance(out_grad, tuple):
out_grad = out_grad[0]
return out_grad.asnumpy()
@@ -131,9 +130,9 @@ class LBFGS(Attack):
the cross-entropy loss.
"""
cur_input = cur_input.astype(self._dtype)
l2_distance = np.linalg.norm(cur_input.reshape(
(cur_input.shape[0], -1)) - start_input.reshape(
(start_input.shape[0], -1)))
l2_distance = np.linalg.norm(
cur_input.reshape((cur_input.shape[0], -1)) - start_input.reshape(
(start_input.shape[0], -1)))
logits = self._forward_one(cur_input.reshape(shape)).flatten()
logits = logits - np.max(logits)
if self._sparse:


+15 -14 mindarmour/fuzzing/fuzzing.py

@@ -14,17 +14,17 @@
"""
Fuzzing.
"""
import numpy as np
from random import choice

from mindspore import Tensor
import numpy as np
from mindspore import Model
from mindspore import Tensor

from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.utils.image_transform import Contrast, Brightness, Blur, Noise, \
Translate, Scale, Shear, Rotate
from mindarmour.utils._check_param import check_model, check_numpy_param, \
check_int_positive
from mindarmour.utils.image_transform import Contrast, Brightness, Blur, Noise, \
Translate, Scale, Shear, Rotate


class Fuzzing:
@@ -40,9 +40,10 @@ class Fuzzing:
target_model (Model): Target fuzz model.
train_dataset (numpy.ndarray): Training dataset used for determine
the neurons' output boundaries.
const_K (int): The number of mutate tests for a seed.
const_k (int): The number of mutate tests for a seed.
mode (str): Image mode used in image transform, 'L' means grey graph.
Default: 'L'.
max_seed_num (int): The initial seeds max value. Default: 1000
"""

def __init__(self, initial_seeds, target_model, train_dataset, const_K,
@@ -50,7 +51,7 @@ class Fuzzing:
self.initial_seeds = initial_seeds
self.target_model = check_model('model', target_model, Model)
self.train_dataset = check_numpy_param('train_dataset', train_dataset)
self.K = check_int_positive('const_k', const_K)
self.const_k = check_int_positive('const_k', const_K)
self.mode = mode
self.max_seed_num = check_int_positive('max_seed_num', max_seed_num)
self.coverage_metrics = ModelCoverageMetrics(target_model, 1000, 10,
@@ -73,7 +74,7 @@ class Fuzzing:
'Noise': Noise,
'Translate': Translate, 'Scale': Scale, 'Shear': Shear,
'Rotate': Rotate}
for _ in range(self.K):
for _ in range(self.const_k):
for _ in range(try_num):
if (info[0] == info[1]).all():
trans_strage = self._random_pick_mutate(affine_trans,
@@ -91,7 +92,7 @@ class Fuzzing:
if trans_strage in affine_trans:
info[1] = mutate_test
mutate_tests.append(mutate_test)
if len(mutate_tests) == 0:
if not mutate_tests:
mutate_tests.append(seed)
return np.array(mutate_tests)

@@ -109,7 +110,7 @@ class Fuzzing:
seed = self._select_next()
failed_tests = []
seed_num = 0
while len(seed) > 0 and seed_num < self.max_seed_num:
while seed and seed_num < self.max_seed_num:
mutate_tests = self._metamorphic_mutate(seed[0])
coverages, results = self._run(mutate_tests, coverage_metric)
coverage_gains = self._coverage_gains(coverages)
@@ -157,13 +158,13 @@ class Fuzzing:
beta = 0.2
diff = np.array(seed - mutate_test).flatten()
size = np.shape(diff)[0]
L0 = np.linalg.norm(diff, ord=0)
Linf = np.linalg.norm(diff, ord=np.inf)
if L0 > alpha*size:
if Linf < 256:
l0 = np.linalg.norm(diff, ord=0)
linf = np.linalg.norm(diff, ord=np.inf)
if l0 > alpha*size:
if linf < 256:
is_valid = True
else:
if Linf < beta*255:
if linf < beta*255:
is_valid = True

return is_valid
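The renames in this hunk (const_K to const_k, L0/Linf to l0/linf) follow pylint's invalid-name convention of lower_snake_case for attributes and locals. The validity check, condensed into a standalone function for illustration (the function name and the threshold values passed in are placeholders, not from the repository):

```python
import numpy as np


def is_valid_mutation(seed, mutate_test, alpha, beta=0.2):
    diff = np.array(seed - mutate_test).flatten()
    size = np.shape(diff)[0]
    l0 = np.linalg.norm(diff, ord=0)         # number of changed entries
    linf = np.linalg.norm(diff, ord=np.inf)  # largest single change
    if l0 > alpha*size:
        return linf < 256
    return linf < beta*255


# Placeholder inputs and alpha, purely to show the call shape.
print(is_valid_mutation(np.zeros((4, 4)), np.ones((4, 4)) * 10, alpha=0.05))
```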

+3 -6 mindarmour/utils/util.py

@@ -13,7 +13,6 @@
# limitations under the License.
""" Util for MindArmour. """
import numpy as np

from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.ops.composite import GradOperation
@@ -99,23 +98,21 @@ class GradWrapWithLoss(Cell):
super(GradWrapWithLoss, self).__init__()
self._grad_all = GradOperation(name="get_all",
get_all=True,
sens_param=True)
sens_param=False)
self._network = network

def construct(self, inputs, labels, weight):
def construct(self, inputs, labels):
"""
Compute gradient of `inputs` with labels and weight.

Args:
inputs (Tensor): Inputs of network.
labels (Tensor): Labels of inputs.
weight (Tensor): Weight of each gradient, `weight` has the same
shape with labels.

Returns:
Tensor, gradient matrix.
"""
gout = self._grad_all(self._network)(inputs, labels, weight)
gout = self._grad_all(self._network)(inputs, labels)
return gout[0]
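Pieced together from this hunk, the updated wrapper no longer takes a sensitivity tensor: GradOperation is built with sens_param=False and construct drops the weight argument, which is why the attack classes above stop passing `sens`. A consolidated sketch of the class as shown here:

```python
from mindspore.nn import Cell
from mindspore.ops.composite import GradOperation


class GradWrapWithLoss(Cell):
    """Compute the gradient of a loss network with respect to its inputs."""

    def __init__(self, network):
        super(GradWrapWithLoss, self).__init__()
        # sens_param=False: the sensitivity tensor is no longer an input.
        self._grad_all = GradOperation(name="get_all",
                                       get_all=True,
                                       sens_param=False)
        self._network = network

    def construct(self, inputs, labels):
        # Gradient of the wrapped loss network w.r.t. `inputs`.
        gout = self._grad_all(self._network)(inputs, labels)
        return gout[0]
```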




+3 -7 tests/st/resnet50/resnet_cifar10.py

@@ -12,12 +12,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import math

from mindspore import nn
from mindspore.ops import operations as P
from mindspore.common.tensor import Tensor
from mindspore import context
from mindspore.ops import operations as P


def variance_scaling_raw(shape):
@@ -110,8 +107,7 @@ class ResidualBlock(nn.Cell):
def __init__(self,
in_channels,
out_channels,
stride=1,
down_sample=False):
stride=1):
super(ResidualBlock, self).__init__()

out_chls = out_channels // self.expansion
@@ -168,7 +164,7 @@ class ResidualBlockWithDown(nn.Cell):
self.bn3 = bn_with_initialize_last(out_channels)

self.relu = P.ReLU()
self.downSample = down_sample
self.downsample = down_sample

self.conv_down_sample = conv1x1(in_channels, out_channels, stride=stride, padding=0)
self.bn_down_sample = bn_with_initialize(out_channels)
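Dropping the never-read down_sample parameter from ResidualBlock addresses pylint's unused-argument warning, and the lowercased self.downsample attribute follows the invalid-name convention. A toy sketch (class name illustrative, not the real block):

```python
class ResidualBlockSketch:
    """Illustrative only: keeps just the parameters the body actually uses."""

    def __init__(self, in_channels, out_channels, stride=1):
        # No down_sample argument: pylint flags parameters that are accepted
        # but never read inside the method.
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.stride = stride


print(ResidualBlockSketch(64, 256, stride=2).stride)
```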


+0 -1 tests/st/resnet50/test_cifar10_attack_fgsm.py

@@ -18,7 +18,6 @@ Fuction:
Usage:
py.test test_cifar10_attack_fgsm.py
"""
import os
import numpy as np

import pytest


+4 -6 tests/ut/python/attacks/black/test_genetic_attack.py

@@ -16,15 +16,13 @@ Genetic-Attack test.
"""
import numpy as np
import pytest

import mindspore.ops.operations as M
from mindspore import Tensor
from mindspore.nn import Cell
from mindspore import context
from mindspore.nn import Cell

from mindarmour.attacks.black.genetic_attack import GeneticAttack
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.genetic_attack import GeneticAttack

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -115,7 +113,7 @@ def test_supplement():
adaptive=True,
sparse=False)
# raise error
_, adv_data, _ = attack.generate(inputs, labels)
_, _, _ = attack.generate(inputs, labels)


@pytest.mark.level0
@@ -140,5 +138,5 @@ def test_value_error():
adaptive=True,
sparse=False)
# raise error
with pytest.raises(ValueError) as e:
with pytest.raises(ValueError):
assert attack.generate(inputs, labels)
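Here and in test_hsja.py and test_radar_metric.py below, `pytest.raises(ValueError) as e` bound an exception object that was never inspected, which pylint reports as an unused variable. A toy example (helper and message are illustrative, loosely echoing the eps/alpha check removed from test_gradient_method.py):

```python
import pytest


def scale_eps(eps, alpha):
    """Illustrative helper: eps must be larger than alpha."""
    if eps <= alpha:
        raise ValueError('eps must be larger than alpha!')
    return eps - alpha


def test_scale_eps_rejects_bad_args():
    # No `as e`: the exception object is not inspected, so binding it would
    # only create an unused variable.
    with pytest.raises(ValueError):
        scale_eps(0.05, 0.21)
```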

+7 -7 tests/ut/python/attacks/black/test_hsja.py

@@ -11,19 +11,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import sys

import numpy as np
import pytest

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.hop_skip_jump_attack import HopSkipJumpAttack
from mindarmour.utils.logger import LogUtil

sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
"../../../../../"))
from example.mnist_demo.lenet5_net import LeNet5
@@ -135,7 +135,7 @@ def test_hsja_mnist_attack():
attack.set_target_images(target_images)
success_list, adv_data, _ = attack.generate(test_images, target_labels)
else:
success_list, adv_data, query_list = attack.generate(test_images, None)
success_list, adv_data, _ = attack.generate(test_images, None)
assert (adv_data != test_images).any()

adv_datas = []
@@ -144,7 +144,7 @@ def test_hsja_mnist_attack():
if success:
adv_datas.append(adv)
gts.append(gt)
if len(gts) > 0:
if gts:
adv_datas = np.concatenate(np.asarray(adv_datas), axis=0)
gts = np.asarray(gts)
pred_logits_adv = model.predict(adv_datas)
@@ -162,5 +162,5 @@ def test_hsja_mnist_attack():
def test_value_error():
model = get_model()
norm = 'l2'
with pytest.raises(ValueError) as e:
with pytest.raises(ValueError):
assert HopSkipJumpAttack(model, constraint=norm, stepsize_search='bad-search')

+5 -5 tests/ut/python/attacks/black/test_nes.py

@@ -11,19 +11,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys

import numpy as np
import os
import pytest

from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.black.natural_evolutionary_strategy import NES
from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.natural_evolutionary_strategy import NES
from mindarmour.utils.logger import LogUtil

sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
"../../../../../"))
from example.mnist_demo.lenet5_net import LeNet5
@@ -156,7 +156,7 @@ def nes_mnist_attack(scene, top_k):
assert (advs != test_images[:batch_num]).any()

adv_pred = np.argmax(model.predict(advs), axis=1)
adv_accuracy = np.mean(np.equal(adv_pred, true_labels[:test_length]))
_ = np.mean(np.equal(adv_pred, true_labels[:test_length]))


@pytest.mark.level0


+5 -6 tests/ut/python/attacks/black/test_pointwise_attack.py

@@ -14,19 +14,18 @@
"""
PointWise Attack test
"""
import sys
import os
import sys

import numpy as np
import pytest


from mindspore import Tensor
from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from mindarmour.attacks.black.black_model import BlackModel
from mindarmour.attacks.black.pointwise_attack import PointWiseAttack
from mindarmour.utils.logger import LogUtil
from mindarmour.attacks.black.black_model import BlackModel

sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
"../../../../../"))
@@ -75,13 +74,13 @@ def test_pointwise_attack_method():
input_np = np.load(os.path.join(current_dir,
'../../test_data/test_images.npy'))[:3]
labels = np.load(os.path.join(current_dir,
'../../test_data/test_labels.npy'))[:3]
'../../test_data/test_labels.npy'))[:3]
model = ModelToBeAttacked(net)
pre_label = np.argmax(model.predict(input_np), axis=1)
LOGGER.info(TAG, 'original sample predict labels are :{}'.format(pre_label))
LOGGER.info(TAG, 'true labels are: {}'.format(labels))
attack = PointWiseAttack(model, sparse=True, is_targeted=False)
is_adv, adv_data, query_times = attack.generate(input_np, pre_label)
is_adv, adv_data, _ = attack.generate(input_np, pre_label)
LOGGER.info(TAG, 'adv sample predict labels are: {}'
.format(np.argmax(model.predict(adv_data), axis=1)))



+0 -4 tests/ut/python/attacks/test_gradient_method.py

@@ -233,10 +233,6 @@ def test_assert_error():
"""
Random least likely class method unit test.
"""
input_np = np.asarray([[0.1, 0.2, 0.7]], np.float32)
label = np.asarray([2], np.int32)
label = np.eye(3)[label].astype(np.float32)

with pytest.raises(ValueError) as e:
assert RandomLeastLikelyClassMethod(Net(), eps=0.05, alpha=0.21)
assert str(e.value) == 'eps must be larger than alpha!'

+6 -8 tests/ut/python/attacks/test_iterative_gradient_method.py

@@ -134,10 +134,9 @@ def test_diverse_input_iterative_method():
label = np.asarray([2], np.int32)
label = np.eye(3)[label].astype(np.float32)

for i in range(5):
attack = DiverseInputIterativeMethod(Net())
ms_adv_x = attack.generate(input_np, label)
assert np.any(ms_adv_x != input_np), 'Diverse input iterative method: generate' \
attack = DiverseInputIterativeMethod(Net())
ms_adv_x = attack.generate(input_np, label)
assert np.any(ms_adv_x != input_np), 'Diverse input iterative method: generate' \
' value must not be equal to' \
' original value.'

@@ -155,10 +154,9 @@ def test_momentum_diverse_input_iterative_method():
label = np.asarray([2], np.int32)
label = np.eye(3)[label].astype(np.float32)

for i in range(5):
attack = MomentumDiverseInputIterativeMethod(Net())
ms_adv_x = attack.generate(input_np, label)
assert np.any(ms_adv_x != input_np), 'Momentum diverse input iterative method: ' \
attack = MomentumDiverseInputIterativeMethod(Net())
ms_adv_x = attack.generate(input_np, label)
assert np.any(ms_adv_x != input_np), 'Momentum diverse input iterative method: ' \
'generate value must not be equal to' \
' original value.'



+3 -3 tests/ut/python/attacks/test_lbfgs.py

@@ -14,11 +14,11 @@
"""
LBFGS-Attack test.
"""
import os
import sys

import numpy as np
import pytest
import os

from mindspore import context
from mindspore.train.serialization import load_checkpoint, load_param_into_net

@@ -69,4 +69,4 @@ def test_lbfgs_attack():

attack = LBFGS(net, is_targeted=True)
LOGGER.debug(TAG, 'target_np is :{}'.format(target_np[0]))
adv_data = attack.generate(input_np, target_np)
_ = attack.generate(input_np, target_np)

+3 -5 tests/ut/python/defenses/mock_net.py

@@ -18,10 +18,8 @@ import numpy as np

from mindspore import nn
from mindspore import Tensor
from mindspore.nn import Cell
from mindspore.nn import WithLossCell, TrainOneStepCell
from mindspore.nn.optim.momentum import Momentum
from mindspore.ops import operations as P
from mindspore import context
from mindspore.common.initializer import TruncatedNormal

@@ -58,7 +56,7 @@ class Net(nn.Cell):
self.fc3 = fc_with_initialize(84, 10)
self.relu = nn.ReLU()
self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
self.reshape = P.Reshape()
self.flatten = nn.Flatten()

def construct(self, x):
x = self.conv1(x)
@@ -67,7 +65,7 @@ class Net(nn.Cell):
x = self.conv2(x)
x = self.relu(x)
x = self.max_pool2d(x)
x = self.reshape(x, (-1, 16*5*5))
x = self.flatten(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
@@ -75,6 +73,7 @@ class Net(nn.Cell):
x = self.fc3(x)
return x


if __name__ == '__main__':
num_classes = 10
batch_size = 32
@@ -104,4 +103,3 @@ if __name__ == '__main__':
train_net.set_train()

train_net(Tensor(inputs_np), Tensor(labels_np))
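The mock network swaps a hand-written P.Reshape with hard-coded dimensions for nn.Flatten, which keeps the batch axis and infers the rest. A minimal before/after sketch:

```python
import mindspore.nn as nn
from mindspore.ops import operations as P

# Before: flatten by hand with an explicit, magic-number shape.
reshape = P.Reshape()
# x = reshape(x, (-1, 16*5*5))

# After: nn.Flatten keeps the batch dimension and infers the flattened size.
flatten = nn.Flatten()
# x = flatten(x)
```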


+4 -5 tests/ut/python/defenses/test_ad.py

@@ -14,20 +14,19 @@
"""
Adversarial defense test.
"""
import numpy as np
import pytest
import logging

from mindspore import nn
import numpy as np
import pytest
from mindspore import Tensor
from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum

from mock_net import Net
from mindarmour.defenses.adversarial_defense import AdversarialDefense
from mindarmour.utils.logger import LogUtil

from mock_net import Net

LOGGER = LogUtil.get_instance()
TAG = 'Ad_Test'



+5 -6 tests/ut/python/defenses/test_ead.py

@@ -14,22 +14,21 @@
"""
ensemble adversarial defense test.
"""
import numpy as np
import pytest
import logging

from mindspore import nn
import numpy as np
import pytest
from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum

from mock_net import Net
from mindarmour.attacks.gradient_method import FastGradientSignMethod
from mindarmour.attacks.iterative_gradient_method import \
ProjectedGradientDescent
from mindarmour.defenses.adversarial_defense import EnsembleAdversarialDefense
from mindarmour.utils.logger import LogUtil

from mock_net import Net

LOGGER = LogUtil.get_instance()
TAG = 'Ead_Test'

@@ -54,7 +53,7 @@ def test_ead():
if not sparse:
labels = np.eye(num_classes)[labels].astype(np.float32)

net = Net()
net = SimpleNet()
loss_fn = nn.SoftmaxCrossEntropyWithLogits(is_grad=False, sparse=sparse)
optimizer = Momentum(net.trainable_params(), 0.001, 0.9)



+4 -5 tests/ut/python/defenses/test_nad.py

@@ -14,20 +14,19 @@
"""
Natural adversarial defense test.
"""
import numpy as np
import pytest
import logging

from mindspore import nn
import numpy as np
import pytest
from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum

from mock_net import Net
from mindarmour.defenses.natural_adversarial_defense import \
NaturalAdversarialDefense
from mindarmour.utils.logger import LogUtil

from mock_net import Net

LOGGER = LogUtil.get_instance()
TAG = 'Nad_Test'



+4 -5 tests/ut/python/defenses/test_pad.py

@@ -14,20 +14,19 @@
"""
Projected adversarial defense test.
"""
import numpy as np
import pytest
import logging

from mindspore import nn
import numpy as np
import pytest
from mindspore import context
from mindspore import nn
from mindspore.nn.optim.momentum import Momentum

from mock_net import Net
from mindarmour.defenses.projected_adversarial_defense import \
ProjectedAdversarialDefense
from mindarmour.utils.logger import LogUtil

from mock_net import Net

LOGGER = LogUtil.get_instance()
TAG = 'Pad_Test'



+0 -1 tests/ut/python/detectors/black/test_similarity_detector.py

@@ -98,4 +98,3 @@ def test_similarity_detector():
1561, 1612, 1663, 1714, 1765,
1816, 1867, 1918, 1969]
assert np.all(detector.get_detected_queries() == expected_value)

+0 -3 tests/ut/python/detectors/test_spatial_smoothing.py

@@ -111,6 +111,3 @@ def test_spatial_smoothing_diff():
0.38254014, 0.543059, 0.06452079, 0.36902517,
1.1845329, 0.3870097])
assert np.allclose(diffs, expected_value, 0.0001, 0.0001)




+8 -8 tests/ut/python/evaluations/black/test_black_defense_eval.py

@@ -53,14 +53,14 @@ def test_def_eval():

# create obj
def_eval = BlackDefenseEvaluate(raw_preds,
def_preds,
raw_query_counts,
def_query_counts,
raw_query_time,
def_query_time,
def_detection_counts,
true_labels,
max_queries=100)
def_preds,
raw_query_counts,
def_query_counts,
raw_query_time,
def_query_time,
def_detection_counts,
true_labels,
max_queries=100)
# run eval
qcv = def_eval.qcv()
asv = def_eval.asv()


+5 -5 tests/ut/python/evaluations/test_attack_eval.py

@@ -30,8 +30,8 @@ def test_attack_eval():
np.random.seed(1024)
inputs = np.random.normal(size=(3, 512, 512, 3))
labels = np.array([[0.1, 0.1, 0.2, 0.6],
[0.1, 0.7, 0.0, 0.2],
[0.8, 0.1, 0.0, 0.1]])
[0.1, 0.7, 0.0, 0.2],
[0.8, 0.1, 0.0, 0.1]])
adv_x = inputs + np.ones((3, 512, 512, 3))*0.001
adv_y = np.array([[0.1, 0.1, 0.2, 0.6],
[0.1, 0.0, 0.8, 0.1],
@@ -63,8 +63,8 @@ def test_value_error():
np.random.seed(1024)
inputs = np.random.normal(size=(3, 512, 512, 3))
labels = np.array([[0.1, 0.1, 0.2, 0.6],
[0.1, 0.7, 0.0, 0.2],
[0.8, 0.1, 0.0, 0.1]])
[0.1, 0.7, 0.0, 0.2],
[0.8, 0.1, 0.0, 0.1]])
adv_x = inputs + np.ones((3, 512, 512, 3))*0.001
adv_y = np.array([[0.1, 0.1, 0.2, 0.6],
[0.1, 0.0, 0.8, 0.1],
@@ -81,7 +81,7 @@ def test_value_error():
@pytest.mark.platform_x86_ascend_training
@pytest.mark.env_card
@pytest.mark.component_mindarmour
def test_value_error():
def test_empty_input_error():
# prepare test data
np.random.seed(1024)
inputs = np.array([])
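Renaming the second test_value_error to test_empty_input_error removes a silent redefinition: with two functions of the same name in one module, only the later one survives, which pylint reports as function-redefined. A toy illustration, not from the repository:

```python
import pytest


def test_value_error():
    with pytest.raises(ValueError):
        int('not a number')


# A second `def test_value_error` here would silently replace the first one
# (pylint: function-redefined), so the duplicate gets its own name.
def test_empty_input_error():
    with pytest.raises(ValueError):
        int('')
```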


+1 -2 tests/ut/python/evaluations/test_radar_metric.py

@@ -30,7 +30,7 @@ def test_radar_metric():
metrics_labels = ['before', 'after']

# create obj
rm = RadarMetric(metrics_name, metrics_data, metrics_labels, title='',
_ = RadarMetric(metrics_name, metrics_data, metrics_labels, title='',
scale='sparse')


@@ -54,4 +54,3 @@ def test_value_error():
with pytest.raises(ValueError):
assert RadarMetric(['MR', 'ACAC', 'ASS'], metrics_data, metrics_labels,
title='', scale='bad_s')


+1 -1 tests/ut/python/fuzzing/test_coverage_metrics.py

@@ -125,4 +125,4 @@ def test_lenet_mnist_coverage_ascend():
bias_coefficient=0.5)
LOGGER.info(TAG, 'KMNC of this test is : %s', model_fuzz_test.get_kmnc())
LOGGER.info(TAG, 'NBC of this test is : %s', model_fuzz_test.get_nbc())
LOGGER.info(TAG, 'SNAC of this test is : %s', model_fuzz_test.get_snac())
LOGGER.info(TAG, 'SNAC of this test is : %s', model_fuzz_test.get_snac())

+16 -17 tests/ut/python/fuzzing/test_fuzzing.py

@@ -16,18 +16,15 @@ Model-fuzz coverage test.
"""
import numpy as np
import pytest
import sys

from mindspore.train import Model
from mindspore import nn
from mindspore.ops import operations as P
from mindspore import context
from mindspore import nn
from mindspore.common.initializer import TruncatedNormal
from mindspore.ops import operations as P
from mindspore.train import Model

from mindarmour.utils.logger import LogUtil
from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.fuzzing.fuzzing import Fuzzing

from mindarmour.fuzzing.model_coverage_metrics import ModelCoverageMetrics
from mindarmour.utils.logger import LogUtil

LOGGER = LogUtil.get_instance()
TAG = 'Fuzzing test'
@@ -116,17 +113,18 @@ def test_fuzzing_ascend():
model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5,
max_seed_num=10)
failed_tests = model_fuzz_test.fuzzing()
model_coverage_test.test_adequacy_coverage_calculate(
np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s',
model_coverage_test.get_kmnc())
if failed_tests:
model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
else:
LOGGER.info(TAG, 'Fuzzing test identifies none failed test')


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_fuzzing_ascend():
def test_fuzzing_CPU():
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
# load network
net = Net()
@@ -155,7 +153,8 @@ def test_fuzzing_ascend():
model_fuzz_test = Fuzzing(initial_seeds, model, training_data, 5,
max_seed_num=10)
failed_tests = model_fuzz_test.fuzzing()
model_coverage_test.test_adequacy_coverage_calculate(
np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s',
model_coverage_test.get_kmnc())
if failed_tests:
model_coverage_test.test_adequacy_coverage_calculate(np.array(failed_tests).astype(np.float32))
LOGGER.info(TAG, 'KMNC of this test is : %s', model_coverage_test.get_kmnc())
else:
LOGGER.info(TAG, 'Fuzzing test identifies none failed test')

+8 -10 tests/ut/python/utils/test_image_transform.py

@@ -35,7 +35,7 @@ def test_contrast():
mode = 'L'
trans = Contrast(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -47,7 +47,7 @@ def test_brightness():
mode = 'L'
trans = Brightness(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -61,7 +61,7 @@ def test_blur():
mode = 'L'
trans = Blur(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -75,7 +75,7 @@ def test_noise():
mode = 'L'
trans = Noise(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -89,7 +89,7 @@ def test_translate():
mode = 'L'
trans = Translate(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -103,7 +103,7 @@ def test_shear():
mode = 'L'
trans = Shear(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -117,7 +117,7 @@ def test_scale():
mode = 'L'
trans = Scale(image, mode)
trans.random_param()
trans_image = trans.transform()
_ = trans.transform()


@pytest.mark.level0
@@ -131,6 +131,4 @@ def test_rotate():
mode = 'L'
trans = Rotate(image, mode)
trans.random_param()
trans_image = trans.transform()


_ = trans.transform()
