| @@ -13,7 +13,7 @@ build/ | |||
| dist/ | |||
| local_script/ | |||
| example/dataset/ | |||
| example/mnist_demo/MNIST_unzip/ | |||
| example/mnist_demo/MNIST/ | |||
| example/mnist_demo/trained_ckpt_file/ | |||
| example/mnist_demo/model/ | |||
| example/cifar_demo/model/ | |||
| @@ -1,75 +0,0 @@ | |||
| # Copyright 2020 Huawei Technologies Co., Ltd | |||
| # | |||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||
| # you may not use this file except in compliance with the License. | |||
| # You may obtain a copy of the License at | |||
| # | |||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||
| # | |||
| # Unless required by applicable law or agreed to in writing, software | |||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| # ============================================================================ | |||
| """ | |||
| dataset processing. | |||
| """ | |||
| import os | |||
| from mindspore.common import dtype as mstype | |||
| import mindspore.dataset as de | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| import mindspore.dataset.transforms.vision.c_transforms as vision | |||
| def vgg_create_dataset100(data_home, image_size, batch_size, rank_id=0, rank_size=1, repeat_num=1, | |||
| training=True, num_samples=None, shuffle=True): | |||
| """Data operations.""" | |||
| de.config.set_seed(1) | |||
| data_dir = os.path.join(data_home, "train") | |||
| if not training: | |||
| data_dir = os.path.join(data_home, "test") | |||
| if num_samples is not None: | |||
| data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id, | |||
| num_samples=num_samples, shuffle=shuffle) | |||
| else: | |||
| data_set = de.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id) | |||
| input_columns = ["fine_label"] | |||
| output_columns = ["label"] | |||
| data_set = data_set.rename(input_columns=input_columns, output_columns=output_columns) | |||
| data_set = data_set.project(["image", "label"]) | |||
| rescale = 1.0 / 255.0 | |||
| shift = 0.0 | |||
| # define map operations | |||
| random_crop_op = vision.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT | |||
| random_horizontal_op = vision.RandomHorizontalFlip() | |||
| resize_op = vision.Resize(image_size) # interpolation default BILINEAR | |||
| rescale_op = vision.Rescale(rescale, shift) | |||
| normalize_op = vision.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) | |||
| changeswap_op = vision.HWC2CHW() | |||
| type_cast_op = C.TypeCast(mstype.int32) | |||
| c_trans = [] | |||
| if training: | |||
| c_trans = [random_crop_op, random_horizontal_op] | |||
| c_trans += [resize_op, rescale_op, normalize_op, | |||
| changeswap_op] | |||
| # apply map operations on images | |||
| data_set = data_set.map(input_columns="label", operations=type_cast_op) | |||
| data_set = data_set.map(input_columns="image", operations=c_trans) | |||
| # apply repeat operations | |||
| data_set = data_set.repeat(repeat_num) | |||
| # apply shuffle operations | |||
| # data_set = data_set.shuffle(buffer_size=1000) | |||
| # apply batch operations | |||
| data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) | |||
| return data_set | |||
| @@ -1,46 +0,0 @@ | |||
| # mnist demo | |||
| ## Introduction | |||
| The MNIST database of handwritten digits has a training set of 60,000 examples and a test set of 10,000 examples. It is a subset of a larger set available from NIST. The digits have been size-normalized and centered in a fixed-size image. | |||
| ## run demo | |||
| ### 1. download dataset | |||
| ```sh | |||
| $ cd example/mnist_demo | |||
| $ mkdir MNIST_unzip | |||
| $ cd MNIST_unzip | |||
| $ mkdir train | |||
| $ mkdir test | |||
| $ cd train | |||
| $ wget "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz" | |||
| $ wget "http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz" | |||
| $ gzip train-images-idx3-ubyte.gz -d | |||
| $ gzip train-labels-idx1-ubyte.gz -d | |||
| $ cd ../test | |||
| $ wget "http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz" | |||
| $ wget "http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz" | |||
| $ gzip t10k-images-idx3-ubyte.gz -d | |||
| $ gzip t10k-labels-idx1-ubyte.gz -d | |||
| $ cd ../../ | |||
| ``` | |||
| ### 2. train model | |||
| ```sh | |||
| $ python mnist_train.py | |||
| ``` | |||
| ### 3. run attack test | |||
| ```sh | |||
| $ mkdir out.data | |||
| $ python mnist_attack_jsma.py | |||
| ``` | |||
| ### 4. run defense/detector test | |||
| ```sh | |||
| $ python mnist_defense_nad.py | |||
| $ python mnist_similarity_detector.py | |||
| ``` | |||
| @@ -0,0 +1,38 @@ | |||
| # Examples | |||
| ## Introduction | |||
| This package contains application demos for the tools developed in MindArmour. Through these demos, you will quickly | |||
| master those tools. Let's start! | |||
| ## Preparation | |||
| Most of these demos are based on the LeNet5 network and the MNIST dataset. As preparation, we need to download MNIST | |||
| and train a LeNet5 model first. | |||
| ### 1. download dataset | |||
| The MNIST database of handwritten digits has a training set of 60,000 examples and a test set of 10,000 examples. It | |||
| is a subset of a larger set available from NIST. The digits have been size-normalized and centered in a fixed-size image. | |||
| ```sh | |||
| $ cd examples/common/dataset | |||
| $ mkdir MNIST | |||
| $ cd MNIST | |||
| $ mkdir train | |||
| $ mkdir test | |||
| $ cd train | |||
| $ wget "http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz" | |||
| $ wget "http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz" | |||
| $ gzip train-images-idx3-ubyte.gz -d | |||
| $ gzip train-labels-idx1-ubyte.gz -d | |||
| $ cd ../test | |||
| $ wget "http://yann.lecun.com/exdb/mnist/t10k-images-idx3-ubyte.gz" | |||
| $ wget "http://yann.lecun.com/exdb/mnist/t10k-labels-idx1-ubyte.gz" | |||
| $ gzip t10k-images-idx3-ubyte.gz -d | |||
| $ gzip t10k-labels-idx1-ubyte.gz -d | |||
| ``` | |||
| ### 2. train LeNet5 model | |||
| After training the network, you will obtain a group of ckpt files. Those ckpt files save the trained model parameters | |||
| of LeNet5, which can be used in 'examples/ai_fuzzer' and 'examples/model_security'. | |||
| ```sh | |||
| $ cd examples/common/networks/lenet5 | |||
| $ python mnist_train.py | |||
| ``` | |||
| @@ -0,0 +1,16 @@ | |||
| # Copyright 2020 Huawei Technologies Co., Ltd | |||
| # | |||
| # Licensed under the Apache License, Version 2.0 (the "License"); | |||
| # you may not use this file except in compliance with the License. | |||
| # You may obtain a copy of the License at | |||
| # | |||
| # http://www.apache.org/licenses/LICENSE-2.0 | |||
| # | |||
| # Unless required by applicable law or agreed to in writing, software | |||
| # distributed under the License is distributed on an "AS IS" BASIS, | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| """ | |||
| This package includes real application examples for developed features of MindArmour. | |||
| """ | |||
| @@ -0,0 +1,24 @@ | |||
| # Application demos of model fuzzing | |||
| ## Introduction | |||
| Just as in traditional software fuzz testing, we can also design fuzz tests for AI models. By analogy with the | |||
| branch coverage or line coverage of traditional software, the concept of 'neuron coverage' has been proposed based | |||
| on the unique structure of deep neural networks. Neuron coverage can be used as a guide to search for more | |||
| metamorphic inputs with which to test our models. | |||
| ## 1. calculation of neuron coverage | |||
| Three metrics are provided for evaluating the neuron coverage of a test: KMNC, NBC and SNAC. Usually we first feed | |||
| the whole training dataset into the model and record the output range of every neuron (in our method, only the | |||
| neurons of the last layer are recorded). In the testing phase, we feed the test samples into the model and calculate | |||
| the three metrics above from those neurons' output distribution. A conceptual sketch of these metrics follows the command below. | |||
| ```sh | |||
| $ cd examples/ai_fuzzer/ | |||
| $ python lenet5_mnist_coverage.py | |||
| ``` | |||
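For intuition, here is a minimal NumPy sketch of how KMNC, NBC and SNAC can be computed once the per-neuron output ranges have been recorded on the training data. It is a conceptual illustration under assumed array shapes, not the MindArmour `ModelCoverageMetrics` implementation.

```python
import numpy as np

def coverage_metrics(train_outputs, test_outputs, k=10):
    """Conceptual KMNC/NBC/SNAC computation.

    train_outputs, test_outputs: arrays of shape (num_samples, num_neurons)
    holding the recorded outputs of the monitored neurons (e.g. the last layer).
    """
    lower = train_outputs.min(axis=0)   # per-neuron lower bound seen during training
    upper = train_outputs.max(axis=0)   # per-neuron upper bound seen during training
    num_neurons = train_outputs.shape[1]
    width = np.maximum(upper - lower, 1e-12)

    # KMNC: fraction of the k sections of [lower, upper] that test outputs fall into.
    hit = np.zeros((k, num_neurons), dtype=bool)
    for out in test_outputs:
        in_range = (out >= lower) & (out <= upper)
        section = np.clip(((out - lower) / width * k).astype(int), 0, k - 1)
        hit[section[in_range], np.arange(num_neurons)[in_range]] = True
    kmnc = hit.sum() / (k * num_neurons)

    # NBC: fraction of neuron boundaries (both upper and lower) exceeded by some test output.
    above = (test_outputs > upper).any(axis=0)
    below = (test_outputs < lower).any(axis=0)
    nbc = (above.sum() + below.sum()) / (2 * num_neurons)

    # SNAC: fraction of neurons whose upper bound is exceeded by some test output.
    snac = above.sum() / num_neurons
    return kmnc, nbc, snac

# toy usage with random "neuron outputs"
rng = np.random.default_rng(0)
print(coverage_metrics(rng.normal(size=(1000, 16)), rng.normal(scale=1.5, size=(200, 16))))
```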
| ## 2. fuzz test for AI model | |||
| We provide several types of methods for generating metamorphic inputs: affine transformations, pixel | |||
| transformations and adversarial attacks. Usually the original samples are fed into the fuzz function as seeds, and | |||
| metamorphic samples are then generated through iterative mutations; a sketch of this coverage-guided loop follows the command below. | |||
| ```sh | |||
| $ cd examples/ai_fuzzer/ | |||
| $ python lenet5_mnist_fuzzing.py | |||
| ``` | |||
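The sketch below illustrates the coverage-guided loop described above. The `mutate` and `coverage_gain` callables are hypothetical placeholders for the mutation methods and the coverage check, not the actual Fuzzer API.

```python
import random

def fuzz(seeds, mutate, coverage_gain, max_iterations=1000):
    """Conceptual coverage-guided fuzzing loop.

    seeds:         initial samples used as fuzzing seeds (non-empty list)
    mutate:        callable producing a metamorphic variant of a sample
                   (affine/pixel transformation or an adversarial perturbation)
    coverage_gain: callable returning True if the sample increases neuron coverage
    """
    queue = list(seeds)
    generated = []
    for _ in range(max_iterations):
        sample = random.choice(queue)
        candidate = mutate(sample)
        if coverage_gain(candidate):
            # keep samples that exercise new neuron behaviour and reuse them as seeds
            queue.append(candidate)
            generated.append(candidate)
    return generated
```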
| @@ -11,21 +11,18 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from mindspore import Model | |||
| from mindspore import context | |||
| from mindspore.nn import SoftmaxCrossEntropyWithLogits | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import FastGradientSignMethod | |||
| from mindarmour.fuzz_testing import ModelCoverageMetrics | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Neuron coverage test' | |||
| @@ -34,14 +31,14 @@ LOGGER.set_level('INFO') | |||
| def test_lenet_mnist_coverage(): | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| model = Model(net) | |||
| # get training data | |||
| data_list = "./MNIST_unzip/train" | |||
| data_list = "../common/dataset/MNIST/train" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size, sparse=True) | |||
| train_images = [] | |||
| @@ -55,7 +52,7 @@ def test_lenet_mnist_coverage(): | |||
| # fuzz test with original test data | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size, sparse=True) | |||
| test_images = [] | |||
| @@ -11,20 +11,17 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from mindspore import Model | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.fuzz_testing import Fuzzer | |||
| from mindarmour.fuzz_testing import ModelCoverageMetrics | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Fuzz_test' | |||
| @@ -33,9 +30,9 @@ LOGGER.set_level('INFO') | |||
| def test_lenet_mnist_fuzzing(): | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| model = Model(net) | |||
| mutate_config = [{'method': 'Blur', | |||
| @@ -58,7 +55,7 @@ def test_lenet_mnist_fuzzing(): | |||
| ] | |||
| # get training data | |||
| data_list = "./MNIST_unzip/train" | |||
| data_list = "../common/dataset/MNIST/train" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size, sparse=False) | |||
| train_images = [] | |||
| @@ -72,7 +69,7 @@ def test_lenet_mnist_fuzzing(): | |||
| # fuzz test with original test data | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size, sparse=False) | |||
| test_images = [] | |||
| @@ -11,10 +11,12 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import os | |||
| import mindspore.dataset as ds | |||
| import mindspore.dataset.transforms.vision.c_transforms as CV | |||
| import mindspore.dataset.vision.c_transforms as CV | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| from mindspore.dataset.transforms.vision import Inter | |||
| from mindspore.dataset.vision import Inter | |||
| import mindspore.common.dtype as mstype | |||
| @@ -60,3 +62,56 @@ def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1, | |||
| ds1 = ds1.repeat(repeat_size) | |||
| return ds1 | |||
| def vgg_create_dataset100(data_home, image_size, batch_size, rank_id=0, rank_size=1, repeat_num=1, | |||
| training=True, num_samples=None, shuffle=True): | |||
| """Data operations.""" | |||
| ds.config.set_seed(1) | |||
| data_dir = os.path.join(data_home, "train") | |||
| if not training: | |||
| data_dir = os.path.join(data_home, "test") | |||
| if num_samples is not None: | |||
| data_set = ds.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id, | |||
| num_samples=num_samples, shuffle=shuffle) | |||
| else: | |||
| data_set = ds.Cifar100Dataset(data_dir, num_shards=rank_size, shard_id=rank_id) | |||
| input_columns = ["fine_label"] | |||
| output_columns = ["label"] | |||
| data_set = data_set.rename(input_columns=input_columns, output_columns=output_columns) | |||
| data_set = data_set.project(["image", "label"]) | |||
| rescale = 1.0 / 255.0 | |||
| shift = 0.0 | |||
| # define map operations | |||
| random_crop_op = CV.RandomCrop((32, 32), (4, 4, 4, 4)) # padding_mode default CONSTANT | |||
| random_horizontal_op = CV.RandomHorizontalFlip() | |||
| resize_op = CV.Resize(image_size) # interpolation default BILINEAR | |||
| rescale_op = CV.Rescale(rescale, shift) | |||
| normalize_op = CV.Normalize((0.4465, 0.4822, 0.4914), (0.2010, 0.1994, 0.2023)) | |||
| changeswap_op = CV.HWC2CHW() | |||
| type_cast_op = C.TypeCast(mstype.int32) | |||
| c_trans = [] | |||
| if training: | |||
| c_trans = [random_crop_op, random_horizontal_op] | |||
| c_trans += [resize_op, rescale_op, normalize_op, | |||
| changeswap_op] | |||
| # apply map operations on images | |||
| data_set = data_set.map(input_columns="label", operations=type_cast_op) | |||
| data_set = data_set.map(input_columns="image", operations=c_trans) | |||
| # apply repeat operations | |||
| data_set = data_set.repeat(repeat_num) | |||
| # apply shuffle operations | |||
| # data_set = data_set.shuffle(buffer_size=1000) | |||
| # apply batch operations | |||
| data_set = data_set.batch(batch_size=batch_size, drop_remainder=True) | |||
| return data_set | |||
| @@ -12,7 +12,6 @@ | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import os | |||
| import sys | |||
| import mindspore.nn as nn | |||
| from mindspore import context | |||
| @@ -21,11 +20,11 @@ from mindspore.train import Model | |||
| from mindspore.train.callback import ModelCheckpoint, CheckpointConfig, LossMonitor | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -33,7 +32,7 @@ TAG = "Lenet5_train" | |||
| def mnist_train(epoch_size, batch_size, lr, momentum): | |||
| mnist_path = "./MNIST_unzip/" | |||
| mnist_path = "../../dataset/MNIST" | |||
| ds = generate_mnist_dataset(os.path.join(mnist_path, "train"), | |||
| batch_size=batch_size, repeat_size=1) | |||
| @@ -21,6 +21,7 @@ import numpy as np | |||
| import mindspore.nn as nn | |||
| from mindspore.common import initializer as init | |||
| def _calculate_gain(nonlinearity, param=None): | |||
| r""" | |||
| Return the recommended gain value for the given nonlinearity function. | |||
| @@ -62,6 +63,7 @@ def _calculate_gain(nonlinearity, param=None): | |||
| raise ValueError("Unsupported nonlinearity {}".format(nonlinearity)) | |||
| def _assignment(arr, num): | |||
| """Assign the value of `num` to `arr`.""" | |||
| if arr.shape == (): | |||
| @@ -75,6 +77,7 @@ def _assignment(arr, num): | |||
| arr[:] = num | |||
| return arr | |||
| def _calculate_in_and_out(arr): | |||
| """ | |||
| Calculate n_in and n_out. | |||
| @@ -98,6 +101,7 @@ def _calculate_in_and_out(arr): | |||
| n_out *= counter | |||
| return n_in, n_out | |||
| def _select_fan(array, mode): | |||
| mode = mode.lower() | |||
| valid_modes = ['fan_in', 'fan_out'] | |||
| @@ -107,6 +111,7 @@ def _select_fan(array, mode): | |||
| fan_in, fan_out = _calculate_in_and_out(array) | |||
| return fan_in if mode == 'fan_in' else fan_out | |||
| class KaimingInit(init.Initializer): | |||
| r""" | |||
| Base Class. Initialize the array with He kaiming algorithm. | |||
| @@ -0,0 +1,40 @@ | |||
| # Application demos of model security | |||
| ## Introduction | |||
| It has been shown that AI models are vulnerable to adversarial noise that is invisible to the human eye. Through the | |||
| demos in this package, you will learn to use the tools provided by MindArmour to generate adversarial samples and | |||
| to improve the robustness of your model. | |||
| ## 1. Generate adversarial samples (Attack method) | |||
| Attack methods can be classified into white-box attacks and black-box attacks. A white-box attack means that the | |||
| attacker has access to the model structure and its parameters. A black-box attack means that the attacker can only | |||
| obtain the prediction results of the target model. | |||
| ### white-box attack | |||
| Run the classical white-box attack method FGSM (Fast Gradient Sign Method); a minimal sketch of the FGSM update follows the command below. | |||
| ```sh | |||
| $ cd examples/model_security/model_attacks/white-box | |||
| $ python mnist_attack_fgsm.py | |||
| ``` | |||
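As a reminder of what the script implements, FGSM perturbs the input along the sign of the loss gradient: x_adv = x + eps * sign(grad). A minimal NumPy sketch, assuming a hypothetical `loss_gradient(x, y)` helper that returns the gradient of the loss with respect to the input:

```python
import numpy as np

def fgsm(x, y, loss_gradient, eps=0.3, clip_min=0.0, clip_max=1.0):
    """One-step FGSM: move the input along the sign of the loss gradient."""
    grad = loss_gradient(x, y)                 # d(loss)/d(input), available in the white-box setting
    x_adv = x + eps * np.sign(grad)            # single gradient-sign step of size eps
    return np.clip(x_adv, clip_min, clip_max)  # keep pixels in the valid range
```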
| ### black-box attack | |||
| Run the classical black-box attack method PSO-Attack. | |||
| ```sh | |||
| $ cd examples/model_security/model_attacks/black-box | |||
| $ python mnist_attack_pso.py | |||
| ``` | |||
| ## 2. Improve the robustness of models | |||
| ### adversarial training | |||
| Adversarial training is an effective method to enhance a model's robustness to attacks: generated adversarial | |||
| samples are fed back into the model for retraining. A minimal sketch of this loop follows the command below. | |||
| ```sh | |||
| $ cd examples/model_security/model_defenses | |||
| $ python mnist_defense_nad.py | |||
| ``` | |||
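A minimal sketch of the adversarial-training loop, with hypothetical `generate_adversarial` and `train_step` helpers standing in for the attack and the optimizer update (the demo itself uses MindArmour's NaturalAdversarialDefense):

```python
def adversarial_training(batches, generate_adversarial, train_step, epochs=10):
    """Conceptual adversarial training: retrain on clean plus adversarial samples.

    batches: a reusable sequence of (images, labels) pairs.
    """
    for _ in range(epochs):
        for images, labels in batches:
            adv_images = generate_adversarial(images, labels)  # e.g. FGSM on the current model
            train_step(images, labels)                         # fit on the clean batch
            train_step(adv_images, labels)                     # fit on its adversarial version
```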
| ### adversarial detection | |||
| Besides adversarial training, there is another type of defense: adversarial detection. This method mainly targets | |||
| black-box attacks, which usually require frequent queries to the model with only small differences between adjacent | |||
| query inputs. The detection algorithm analyzes the similarity within a series of queries and thereby recognizes the | |||
| attack. A minimal sketch of such a detector follows the command below. | |||
| ```sh | |||
| $ cd examples/model_security/model_defenses | |||
| $ python mnist_similarity_detector.py | |||
| ``` | |||
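A minimal sketch of the detection idea: keep a buffer of recent queries (as flattened vectors) and raise an alarm when a new query is suspiciously close to its nearest neighbours. The parameters are illustrative, not those of MindArmour's SimilarityDetector.

```python
import numpy as np

class QuerySimilarityDetector:
    """Conceptual detector for query-based black-box attacks."""

    def __init__(self, k=5, threshold=1.0, max_buffer=1000):
        self.k = k                  # number of nearest neighbours to inspect
        self.threshold = threshold  # alarm when neighbours are closer than this on average
        self.max_buffer = max_buffer
        self.buffer = []

    def detect(self, query):
        query = np.ravel(query)
        alarm = False
        if len(self.buffer) >= self.k:
            dists = np.sort([np.linalg.norm(query - q) for q in self.buffer])[:self.k]
            alarm = dists.mean() < self.threshold  # adjacent attack queries differ only slightly
        self.buffer.append(query)
        self.buffer = self.buffer[-self.max_buffer:]
        return alarm
```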
| @@ -11,24 +11,21 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| from scipy.special import softmax | |||
| import numpy as np | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import BlackModel | |||
| from mindarmour.adv_robustness.attacks.black.black_model import BlackModel | |||
| from mindarmour.adv_robustness.attacks.black.genetic_attack import GeneticAttack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -53,13 +50,13 @@ def test_genetic_attack_on_mnist(): | |||
| Genetic-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,20 +11,18 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks import HopSkipJumpAttack | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -72,14 +70,14 @@ def test_hsja_mnist_attack(): | |||
| hsja-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| net.set_train(False) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,21 +11,18 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks import NES | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -82,14 +79,14 @@ def test_nes_mnist_attack(): | |||
| hsja-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| net.set_train(False) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,22 +11,20 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks import PointWiseAttack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Pointwise_Attack' | |||
| @@ -53,13 +51,13 @@ def test_pointwise_attack_on_mnist(): | |||
| Salt-and-Pepper-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,24 +11,21 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks.black.pso_attack import PSOAttack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -53,13 +50,13 @@ def test_pso_attack_on_mnist(): | |||
| PSO-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,22 +11,20 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks import SaltAndPepperNoiseAttack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Salt_and_Pepper_Attack' | |||
| @@ -53,13 +51,13 @@ def test_salt_and_pepper_attack_on_mnist(): | |||
| Salt-and-Pepper-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,24 +11,21 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import CarliniWagnerL2Attack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -40,13 +37,13 @@ def test_carlini_wagner_attack(): | |||
| CW-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,24 +11,21 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks.deep_fool import DeepFool | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -40,13 +37,13 @@ def test_deepfool_attack(): | |||
| DeepFool-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,25 +11,22 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from mindspore.nn import SoftmaxCrossEntropyWithLogits | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import FastGradientSignMethod | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -41,13 +38,13 @@ def test_fast_gradient_sign_method(): | |||
| FGSM-Attack test for CPU device. | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size) | |||
| @@ -11,26 +11,21 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import JSMAAttack | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -42,13 +37,13 @@ def test_jsma_attack(): | |||
| JSMA-Attack test | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,25 +11,22 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from mindspore.nn import SoftmaxCrossEntropyWithLogits | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import LBFGS | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -41,13 +38,13 @@ def test_lbfgs_attack(): | |||
| LBFGS-Attack test for CPU device. | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| @@ -11,25 +11,22 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from mindspore.nn import SoftmaxCrossEntropyWithLogits | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import MomentumDiverseInputIterativeMethod | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'M_DI2_FGSM_Test' | |||
| @@ -42,13 +39,13 @@ def test_momentum_diverse_input_iterative_method(): | |||
| """ | |||
| context.set_context(mode=context.GRAPH_MODE, device_target="CPU") | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size) | |||
| @@ -11,25 +11,22 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from mindspore.nn import SoftmaxCrossEntropyWithLogits | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import ProjectedGradientDescent | |||
| from mindarmour.adv_robustness.evaluations import AttackEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| LOGGER.set_level('INFO') | |||
| @@ -41,13 +38,13 @@ def test_projected_gradient_descent_method(): | |||
| PGD-Attack test for CPU device. | |||
| """ | |||
| # upload trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds = generate_mnist_dataset(data_list, batch_size) | |||
| @@ -13,7 +13,6 @@ | |||
| # limitations under the License. | |||
| """defense example using nad""" | |||
| import os | |||
| import sys | |||
| import numpy as np | |||
| from mindspore import Tensor | |||
| @@ -27,10 +26,8 @@ from mindarmour.adv_robustness.attacks import FastGradientSignMethod | |||
| from mindarmour.adv_robustness.defenses import NaturalAdversarialDefense | |||
| from mindarmour.utils.logger import LogUtil | |||
| from lenet5_net import LeNet5 | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| LOGGER = LogUtil.get_instance() | |||
| @@ -42,7 +39,7 @@ def test_nad_method(): | |||
| """ | |||
| NAD-Defense test. | |||
| """ | |||
| mnist_path = "./MNIST_unzip/" | |||
| mnist_path = "../../common/dataset/MNIST" | |||
| batch_size = 32 | |||
| # 1. train original model | |||
| ds_train = generate_mnist_dataset(os.path.join(mnist_path, "train"), | |||
| @@ -98,10 +95,19 @@ def test_nad_method(): | |||
| np.mean(acc_list)) | |||
| # 6. defense | |||
| ds_train = generate_mnist_dataset(os.path.join(mnist_path, "train"), | |||
| batch_size=batch_size, repeat_size=1) | |||
| inputs_train = [] | |||
| labels_train = [] | |||
| for data in ds_train.create_tuple_iterator(): | |||
| inputs_train.append(data[0].astype(np.float32)) | |||
| labels_train.append(data[1]) | |||
| inputs_train = np.concatenate(inputs_train) | |||
| labels_train = np.concatenate(labels_train) | |||
| net.set_train() | |||
| nad = NaturalAdversarialDefense(net, loss_fn=loss, optimizer=opt, | |||
| bounds=(0.0, 1.0), eps=0.3) | |||
| nad.batch_defense(inputs, labels, batch_size=32, epochs=10) | |||
| nad.batch_defense(inputs_train, labels_train, batch_size=32, epochs=10) | |||
| # 7. get accuracy of test data on defensed model | |||
| net.set_train(False) | |||
| @@ -13,7 +13,6 @@ | |||
| # limitations under the License. | |||
| """evaluate example""" | |||
| import os | |||
| import sys | |||
| import time | |||
| import numpy as np | |||
| @@ -27,19 +26,17 @@ from mindspore.ops.operations import TensorAdd | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour.adv_robustness.attacks import FastGradientSignMethod | |||
| from mindarmour.adv_robustness.attacks import GeneticAttack | |||
| from mindarmour.adv_robustness.attacks import BlackModel | |||
| from mindarmour.adv_robustness.attacks.black.black_model import BlackModel | |||
| from mindarmour.adv_robustness.defenses import NaturalAdversarialDefense | |||
| from mindarmour.adv_robustness.detectors import SimilarityDetector | |||
| from mindarmour.adv_robustness.evaluations import BlackDefenseEvaluate | |||
| from mindarmour.adv_robustness.evaluations import DefenseEvaluate | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Defense_Evaluate_Example' | |||
| @@ -126,18 +123,17 @@ class ModelToBeAttacked(BlackModel): | |||
| return self._detected_res | |||
| def test_black_defense(): | |||
| def test_defense_evaluation(): | |||
| # load trained network | |||
| current_dir = os.path.dirname(os.path.abspath(__file__)) | |||
| ckpt_name = os.path.abspath(os.path.join( | |||
| current_dir, './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt')) | |||
| # ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = os.path.abspath(os.path.join( | |||
| current_dir, '../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt')) | |||
| wb_net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(wb_net, load_dict) | |||
| # get test data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../common/dataset/MNIST/test" | |||
| batch_size = 32 | |||
| ds_test = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| inputs = [] | |||
| @@ -324,4 +320,4 @@ if __name__ == '__main__': | |||
| context.set_context(mode=context.GRAPH_MODE, device_target="GPU") | |||
| DEVICE = context.get_context("device_target") | |||
| if DEVICE in ("Ascend", "GPU"): | |||
| test_black_defense() | |||
| test_defense_evaluation() | |||
| @@ -11,26 +11,23 @@ | |||
| # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
| # See the License for the specific language governing permissions and | |||
| # limitations under the License. | |||
| import sys | |||
| import numpy as np | |||
| from scipy.special import softmax | |||
| from mindspore import Model | |||
| from mindspore import Tensor | |||
| from mindspore import context | |||
| from mindspore.nn import Cell | |||
| from mindspore.ops.operations import TensorAdd | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| from scipy.special import softmax | |||
| from lenet5_net import LeNet5 | |||
| from mindarmour import BlackModel | |||
| from mindarmour.adv_robustness.attacks.black.pso_attack import PSOAttack | |||
| from mindarmour.adv_robustness.detectors import SimilarityDetector | |||
| from mindarmour.utils.logger import LogUtil | |||
| sys.path.append("..") | |||
| from data_processing import generate_mnist_dataset | |||
| from examples.common.dataset.data_processing import generate_mnist_dataset | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| LOGGER = LogUtil.get_instance() | |||
| TAG = 'Similarity Detector test' | |||
| @@ -95,13 +92,13 @@ def test_similarity_detector(): | |||
| Similarity Detector test. | |||
| """ | |||
| # load trained network | |||
| ckpt_name = './trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| ckpt_path = '../../common/networks/lenet5/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt' | |||
| net = LeNet5() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get mnist data | |||
| data_list = "./MNIST_unzip/test" | |||
| data_list = "../../common/dataset/MNIST/test" | |||
| batch_size = 1000 | |||
| ds = generate_mnist_dataset(data_list, batch_size=batch_size) | |||
| model = ModelToBeAttacked(net) | |||
| @@ -0,0 +1,33 @@ | |||
| # Application demos of privacy stealing and privacy protection | |||
| ## Introduction | |||
| Although machine learning can obtain a generic model from training data, it has been shown that the trained model | |||
| may disclose information about the training data (for example, through membership inference attacks). Differential | |||
| privacy (DP) training is an effective method proposed to overcome this problem, in which Gaussian noise is added | |||
| during training. DP training mainly consists of three parts: a noise-generating mechanism, a DP optimizer and a DP | |||
| monitor. We have implemented a novel noise-generating mechanism, the adaptive decay noise mechanism. The DP monitor | |||
| is used to compute the privacy budget during training. | |||
| ## 1. Adaptive decay DP training | |||
| With the adaptive decay mechanism, the magnitude of the Gaussian noise decays as the training step grows, which | |||
| results in stable convergence; a minimal sketch of the decay schedule follows the command below. | |||
| ```sh | |||
| $ cd examples/privacy/diff_privacy | |||
| $ python lenet5_dp_ada_gaussian.py | |||
| ``` | |||
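A minimal sketch of the idea, assuming an exponential per-step decay of the noise multiplier; the concrete mechanism and its hyper-parameters are configured inside the script.

```python
import numpy as np

def noisy_gradient(grad, step, initial_multiplier=1.5, decay_rate=6e-4, norm_bound=1.0):
    """Clip the gradient, then add Gaussian noise whose scale decays with the training step."""
    multiplier = initial_multiplier * (1.0 - decay_rate) ** step            # adaptive decay of noise scale
    clipped = grad * min(1.0, norm_bound / (np.linalg.norm(grad) + 1e-12))  # gradient norm clipping
    noise = np.random.normal(0.0, multiplier * norm_bound, size=grad.shape)
    return clipped + noise
```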
| ## 2. Adaptive norm clip training | |||
| With the adaptive norm-clip mechanism, the gradient clipping bound is adjusted according to the norms of the | |||
| gradients, which tunes the ratio between the noise and the original gradients; a minimal sketch follows the command below. | |||
| ```sh | |||
| $ cd examples/privacy/diff_privacy | |||
| $ python lenet5_dp.py | |||
| ``` | |||
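One common way to adapt the clipping bound is to track how many per-sample gradients stay below it and nudge the bound toward a target unclipped fraction; the sketch below illustrates this idea and is an assumption, not the exact MindArmour update rule.

```python
def update_clip_bound(clip_bound, grad_norms, target_unclipped_fraction=0.9, learning_rate=0.01):
    """Conceptual adaptive norm clipping: keep roughly `target_unclipped_fraction`
    of the per-sample gradient norms below the clipping bound."""
    unclipped_fraction = sum(n <= clip_bound for n in grad_norms) / len(grad_norms)
    # too many clipped gradients -> grow the bound; too few -> shrink it
    return clip_bound * (1.0 + learning_rate * (target_unclipped_fraction - unclipped_fraction))

# hypothetical usage: update the bound with the gradient norms observed at each step
bound = 1.0
for step_norms in [[0.8, 1.2, 0.5], [1.5, 1.1, 0.9]]:
    bound = update_clip_bound(bound, step_norms)
```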
| ## 3. Membership inference attack | |||
| With this attack method, we can judge whether or not a sample belongs to the training dataset; a minimal sketch of such an attack follows the command below. | |||
| ```sh | |||
| $ cd examples/privacy/membership_inference_attack | |||
| $ python vgg_cifar_attack.py | |||
| ``` | |||
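A minimal sketch of the membership-inference idea: samples that the model fits unusually well (low loss) are guessed to be training members. The loss arrays are assumed inputs; the actual attack in the script trains attack models on richer features.

```python
import numpy as np

def loss_threshold_attack(member_losses, nonmember_losses, query_losses):
    """Conceptual membership inference based on a loss threshold."""
    # choose the threshold that best separates known members from known non-members
    best_t, best_acc = None, 0.0
    for t in np.unique(np.concatenate([member_losses, nonmember_losses])):
        acc = ((member_losses <= t).mean() + (nonmember_losses > t).mean()) / 2
        if acc > best_acc:
            best_t, best_acc = t, acc
    return query_losses <= best_t  # True -> predicted to be in the training set
```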
| @@ -29,7 +29,7 @@ mnist_cfg = edict({ | |||
| 'save_checkpoint_steps': 234, # the interval steps for saving checkpoint file of the model | |||
| 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved | |||
| 'device_target': 'Ascend', # device used | |||
| 'data_path': './MNIST_unzip', # the path of training and testing data set | |||
| 'data_path': '../../common/dataset/MNIST', # the path of training and testing data set | |||
| 'dataset_sink_mode': False, # whether deliver all training data to device one time | |||
| 'micro_batches': 32, # the number of small batches split from an original batch | |||
| 'norm_bound': 1.0, # the clip bound of the gradients of model's training parameters | |||
| @@ -29,7 +29,7 @@ mnist_cfg = edict({ | |||
| 'save_checkpoint_steps': 234, # the interval steps for saving checkpoint file of the model | |||
| 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved | |||
| 'device_target': 'Ascend', # device used | |||
| 'data_path': './MNIST_unzip', # the path of training and testing data set | |||
| 'data_path': '../../common/dataset/MNIST', # the path of training and testing data set | |||
| 'dataset_sink_mode': False, # whether deliver all training data to device one time | |||
| 'micro_batches': 16, # the number of small batches split from an original batch | |||
| 'norm_bound': 1.0, # the clip bound of the gradients of model's training parameters | |||
| @@ -29,7 +29,7 @@ mnist_cfg = edict({ | |||
| 'save_checkpoint_steps': 234, # the interval steps for saving checkpoint file of the model | |||
| 'keep_checkpoint_max': 10, # the maximum number of checkpoint files would be saved | |||
| 'device_target': 'Ascend', # device used | |||
| 'data_path': './MNIST_unzip', # the path of training and testing data set | |||
| 'data_path': '../../common/dataset/MNIST', # the path of training and testing data set | |||
| 'dataset_sink_mode': False, # whether deliver all training data to device one time | |||
| 'micro_batches': 32, # the number of small batches split from an original batch | |||
| 'norm_bound': 1.0, # the clip bound of the gradients of model's training parameters | |||
| @@ -24,9 +24,9 @@ from mindspore.train.callback import LossMonitor | |||
| from mindspore.nn.metrics import Accuracy | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| import mindspore.dataset as ds | |||
| import mindspore.dataset.transforms.vision.c_transforms as CV | |||
| import mindspore.dataset.vision.c_transforms as CV | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| from mindspore.dataset.transforms.vision import Inter | |||
| from mindspore.dataset.vision import Inter | |||
| import mindspore.common.dtype as mstype | |||
| from mindarmour.privacy.diff_privacy import DPModel | |||
| @@ -34,7 +34,7 @@ from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory | |||
| from mindarmour.privacy.diff_privacy import NoiseMechanismsFactory | |||
| from mindarmour.privacy.diff_privacy import ClipMechanismsFactory | |||
| from mindarmour.utils.logger import LogUtil | |||
| from lenet5_net import LeNet5 | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from lenet5_config import mnist_cfg as cfg | |||
| LOGGER = LogUtil.get_instance() | |||
| @@ -24,16 +24,16 @@ from mindspore.train.callback import LossMonitor | |||
| from mindspore.nn.metrics import Accuracy | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| import mindspore.dataset as ds | |||
| import mindspore.dataset.transforms.vision.c_transforms as CV | |||
| import mindspore.dataset.vision.c_transforms as CV | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| from mindspore.dataset.transforms.vision import Inter | |||
| from mindspore.dataset.vision import Inter | |||
| import mindspore.common.dtype as mstype | |||
| from mindarmour.privacy.diff_privacy import DPModel | |||
| from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory | |||
| from mindarmour.privacy.diff_privacy import NoiseMechanismsFactory | |||
| from mindarmour.utils.logger import LogUtil | |||
| from lenet5_net import LeNet5 | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from dp_ada_gaussian_config import mnist_cfg as cfg | |||
| LOGGER = LogUtil.get_instance() | |||
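The recurring change in these import hunks is the move from `mindspore.dataset.transforms.vision.c_transforms` to `mindspore.dataset.vision.c_transforms`, and from `mindspore.dataset.transforms.vision.Inter` to `mindspore.dataset.vision.Inter`. The sketch below shows a small MNIST pipeline written against the new module paths; the helper is an illustrative stand-in for the examples' dataset utility, not a copy of it.

```python
# Illustrative MNIST pipeline using the new import locations.
import mindspore.dataset as ds
import mindspore.dataset.vision.c_transforms as CV          # was dataset.transforms.vision.c_transforms
import mindspore.dataset.transforms.c_transforms as C
from mindspore.dataset.vision import Inter                  # was dataset.transforms.vision.Inter
import mindspore.common.dtype as mstype


def generate_mnist_dataset(data_path, batch_size=32, repeat_size=1):
    """Build a simple pipeline from the shared MNIST directory."""
    mnist_ds = ds.MnistDataset(data_path)

    resize_op = CV.Resize((32, 32), interpolation=Inter.LINEAR)
    rescale_op = CV.Rescale(1.0 / 255.0, 0.0)
    hwc2chw_op = CV.HWC2CHW()
    type_cast_op = C.TypeCast(mstype.int32)

    mnist_ds = mnist_ds.map(operations=type_cast_op, input_columns="label")
    mnist_ds = mnist_ds.map(operations=[resize_op, rescale_op, hwc2chw_op],
                            input_columns="image")
    mnist_ds = mnist_ds.batch(batch_size, drop_remainder=True)
    return mnist_ds.repeat(repeat_size)
```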
| @@ -24,16 +24,16 @@ from mindspore.train.callback import LossMonitor | |||
| from mindspore.nn.metrics import Accuracy | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| import mindspore.dataset as ds | |||
| import mindspore.dataset.transforms.vision.c_transforms as CV | |||
| import mindspore.dataset.vision.c_transforms as CV | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| from mindspore.dataset.transforms.vision import Inter | |||
| from mindspore.dataset.vision import Inter | |||
| import mindspore.common.dtype as mstype | |||
| from mindarmour.privacy.diff_privacy import DPModel | |||
| from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory | |||
| from mindarmour.privacy.diff_privacy import NoiseMechanismsFactory | |||
| from mindarmour.utils.logger import LogUtil | |||
| from lenet5_net import LeNet5 | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from dp_ada_sgd_graph_config import mnist_cfg as cfg | |||
| LOGGER = LogUtil.get_instance() | |||
| @@ -24,9 +24,9 @@ from mindspore.train.callback import LossMonitor | |||
| from mindspore.nn.metrics import Accuracy | |||
| from mindspore.train.serialization import load_checkpoint, load_param_into_net | |||
| import mindspore.dataset as ds | |||
| import mindspore.dataset.transforms.vision.c_transforms as CV | |||
| import mindspore.dataset.vision.c_transforms as CV | |||
| import mindspore.dataset.transforms.c_transforms as C | |||
| from mindspore.dataset.transforms.vision import Inter | |||
| from mindspore.dataset.vision import Inter | |||
| import mindspore.common.dtype as mstype | |||
| from mindarmour.privacy.diff_privacy import DPModel | |||
| @@ -34,7 +34,7 @@ from mindarmour.privacy.diff_privacy import PrivacyMonitorFactory | |||
| from mindarmour.privacy.diff_privacy import DPOptimizerClassFactory | |||
| from mindarmour.privacy.diff_privacy import ClipMechanismsFactory | |||
| from mindarmour.utils.logger import LogUtil | |||
| from lenet5_net import LeNet5 | |||
| from examples.common.networks.lenet5.lenet5_net import LeNet5 | |||
| from lenet5_config import mnist_cfg as cfg | |||
| LOGGER = LogUtil.get_instance() | |||
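This variant imports `DPOptimizerClassFactory` instead of `NoiseMechanismsFactory`: the noise is injected by a DP-wrapped optimizer rather than by the model wrapper. A hedged sketch of the documented usage follows; the numeric values are placeholders, and `network` and `cfg` are the objects created by the surrounding script.

```python
# Hedged sketch of DPOptimizerClassFactory usage; values are placeholders.
from mindarmour.privacy.diff_privacy import DPOptimizerClassFactory

opt_factory = DPOptimizerClassFactory(micro_batches=cfg.micro_batches)
opt_factory.set_mechanisms('Gaussian',
                           norm_bound=cfg.norm_bound,
                           initial_noise_multiplier=1.5)

# The factory returns a DP-wrapped optimizer class; here a Momentum optimizer.
net_opt = opt_factory.create('Momentum')(params=network.trainable_params(),
                                         learning_rate=0.1,
                                         momentum=0.9)
```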
| @@ -27,9 +27,9 @@ from mindspore.ops import functional as F | |||
| from mindspore.common import dtype as mstype | |||
| from mindarmour.utils import LogUtil | |||
| from vgg.vgg import vgg16 | |||
| from vgg.dataset import vgg_create_dataset100 | |||
| from vgg.config import cifar_cfg as cfg | |||
| from examples.common.networks.vgg.vgg import vgg16 | |||
| from examples.common.dataset.data_processing import vgg_create_dataset100 | |||
| from examples.common.networks.vgg.config import cifar_cfg as cfg | |||
| class ParameterReduce(nn.Cell): | |||
| @@ -32,13 +32,13 @@ from mindspore.train.model import Model | |||
| from mindspore.train.serialization import load_param_into_net, load_checkpoint | |||
| from mindarmour.utils import LogUtil | |||
| from vgg.dataset import vgg_create_dataset100 | |||
| from vgg.warmup_step_lr import warmup_step_lr | |||
| from vgg.warmup_cosine_annealing_lr import warmup_cosine_annealing_lr | |||
| from vgg.warmup_step_lr import lr_steps | |||
| from vgg.utils.util import get_param_groups | |||
| from vgg.vgg import vgg16 | |||
| from vgg.config import cifar_cfg as cfg | |||
| from examples.common.dataset.data_processing import vgg_create_dataset100 | |||
| from examples.common.networks.vgg.warmup_step_lr import warmup_step_lr | |||
| from examples.common.networks.vgg.warmup_cosine_annealing_lr import warmup_cosine_annealing_lr | |||
| from examples.common.networks.vgg.warmup_step_lr import lr_steps | |||
| from examples.common.networks.vgg.utils.util import get_param_groups | |||
| from examples.common.networks.vgg.vgg import vgg16 | |||
| from examples.common.networks.vgg.config import cifar_cfg as cfg | |||
| TAG = "train" | |||
| @@ -17,12 +17,6 @@ Examples of membership inference | |||
| """ | |||
| import argparse | |||
| import sys | |||
| from vgg.vgg import vgg16 | |||
| from vgg.config import cifar_cfg as cfg | |||
| from vgg.utils.util import get_param_groups | |||
| from vgg.dataset import vgg_create_dataset100 | |||
| import numpy as np | |||
| from mindspore.train import Model | |||
| @@ -30,10 +24,16 @@ from mindspore.train.serialization import load_param_into_net, load_checkpoint | |||
| import mindspore.nn as nn | |||
| from mindarmour.privacy.evaluation import MembershipInference | |||
| from mindarmour.utils import LogUtil | |||
| from examples.common.networks.vgg.vgg import vgg16 | |||
| from examples.common.networks.vgg.config import cifar_cfg as cfg | |||
| from examples.common.networks.vgg.utils.util import get_param_groups | |||
| from examples.common.dataset.data_processing import vgg_create_dataset100 | |||
| logging = LogUtil.get_instance() | |||
| logging.set_level(20) | |||
| sys.path.append("../../") | |||
| sys.path.append("../../../") | |||
| TAG = "membership inference example" | |||
| @@ -72,10 +72,10 @@ def create_target_images(dataset, data_labels, target_labels): | |||
| def get_model(): | |||
| # upload trained network | |||
| current_dir = os.path.dirname(os.path.abspath(__file__)) | |||
| ckpt_name = os.path.join(current_dir, | |||
| ckpt_path = os.path.join(current_dir, | |||
| '../../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt') | |||
| net = Net() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| net.set_train(False) | |||
| model = ModelToBeAttacked(net) | |||
| @@ -78,10 +78,10 @@ def create_target_images(dataset, data_labels, target_labels): | |||
| def get_model(current_dir): | |||
| ckpt_name = os.path.join(current_dir, | |||
| ckpt_path = os.path.join(current_dir, | |||
| '../../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt') | |||
| net = Net() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| net.set_train(False) | |||
| model = ModelToBeAttacked(net) | |||
| @@ -60,10 +60,10 @@ def test_pointwise_attack_method(): | |||
| np.random.seed(123) | |||
| # upload trained network | |||
| current_dir = os.path.dirname(os.path.abspath(__file__)) | |||
| ckpt_name = os.path.join(current_dir, | |||
| ckpt_path = os.path.join(current_dir, | |||
| '../../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt') | |||
| net = Net() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get one mnist image | |||
| @@ -46,10 +46,10 @@ def test_lbfgs_attack(): | |||
| np.random.seed(123) | |||
| # upload trained network | |||
| current_dir = os.path.dirname(os.path.abspath(__file__)) | |||
| ckpt_name = os.path.join(current_dir, | |||
| ckpt_path = os.path.join(current_dir, | |||
| '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt') | |||
| net = Net() | |||
| load_dict = load_checkpoint(ckpt_name) | |||
| load_dict = load_checkpoint(ckpt_path) | |||
| load_param_into_net(net, load_dict) | |||
| # get one mnist image | |||
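Across these last hunks the only substantive change is the rename of `ckpt_name` to `ckpt_path` before it is handed to `load_checkpoint`; the loading pattern itself is unchanged. A minimal sketch of that pattern, using the checkpoint path referenced in the diffs (here `Net` stands for the LeNet-5 network class used by these tests):

```python
# Minimal sketch of the checkpoint-loading pattern shown in the diffs.
import os
from mindspore.train.serialization import load_checkpoint, load_param_into_net

current_dir = os.path.dirname(os.path.abspath(__file__))
ckpt_path = os.path.join(current_dir,
                         '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt')

net = Net()                              # network definition from the examples package
load_dict = load_checkpoint(ckpt_path)   # read the parameter dict from disk
load_param_into_net(net, load_dict)      # copy parameters into the live network
net.set_train(False)                     # switch to inference mode before attacking
```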