test_optimizer.py

# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor, Parameter
from mindspore.common.api import _executor
from mindspore.communication.management import init
from mindspore.nn import Dense
from mindspore.nn import Momentum
from mindspore.nn import TrainOneStepCell, WithLossCell
from mindspore.ops import operations as P
from mindspore.train.parallel_utils import ParallelMode


class Net(nn.Cell):
    """Dense layer followed by two MatMul ops against layerwise-parallel weights."""
    def __init__(self, input_channel, out_channel):
        super(Net, self).__init__()
        weight_init1 = np.ones([64, 128]).astype(np.float32)
        weight_init2 = np.ones([32, 64]).astype(np.float32)
        self.weight1 = Parameter(Tensor(weight_init1), "loss_weight1", layerwise_parallel=True)
        self.weight2 = Parameter(Tensor(weight_init2), "loss_weight2", layerwise_parallel=True)
        self.fc = P.MatMul(transpose_b=True)
        self.dense = Dense(input_channel, out_channel)

    def construct(self, x):
        x = self.dense(x)
        x = self.fc(x, self.weight1)
        x = self.fc(x, self.weight2)
        return x


def test_dense_gen_graph():
    """Compile a Momentum-driven TrainOneStepCell under hybrid parallel mode with 8 devices."""
    context.set_context(mode=context.GRAPH_MODE)
    init()
    network = Net(512, 128)
    loss_fn = nn.SoftmaxCrossEntropyWithLogits()
    optimizer = Momentum(filter(lambda x: x.requires_grad, network.get_parameters()),
                         learning_rate=0.1,
                         momentum=0.9)
    network = WithLossCell(network, loss_fn)

    context.reset_auto_parallel_context()
    context.set_auto_parallel_context(parallel_mode=ParallelMode.HYBRID_PARALLEL, mirror_mean=True, device_num=8)
    network = TrainOneStepCell(network, optimizer)

    predict = Tensor(np.ones([64, 512]).astype(np.float32) * 0.01)
    label = Tensor(np.zeros([64, 32]).astype(np.float32))
    network.set_auto_parallel()
    # Compile the training graph for the given inputs.
    _executor.compile(network, predict, label)