@@ -16,8 +16,8 @@ Some basic function for nlp
 """
 from enum import IntEnum

-import mindspore._c_dataengine as cde
 import numpy as np
+import mindspore._c_dataengine as cde

 from .validators import check_from_file, check_from_list, check_from_dict
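This hunk, like several below, reorders imports into the conventional PEP 8 grouping: standard library first, then third-party packages, then the project's own modules, with a blank line between groups (pylint reports violations as C0411, wrong-import-order). A minimal sketch of the layout, assuming mindspore is the first-party package being developed:

```python
# Sketch of the PEP 8 import grouping the reordered hunks follow.
from enum import IntEnum  # 1) standard library

import numpy as np  # 2) third-party packages

import mindspore.nn as nn  # 3) first-party / project modules
```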
@@ -13,15 +13,16 @@
 # limitations under the License.
 # ============================================================================
 """lstm"""
-from mindspore.ops import operations as P
-from mindspore.nn.cell import Cell
-from mindspore.common.parameter import Parameter, ParameterTuple
-from mindspore.common.initializer import initializer
-from mindspore._checkparam import Validator as validator
-from mindspore import context
+import numpy as np
 import mindspore.nn as nn
+from mindspore import context
+from mindspore._checkparam import Validator as validator
+from mindspore.common.initializer import initializer
+from mindspore.common.parameter import Parameter, ParameterTuple
 from mindspore.common.tensor import Tensor
-import numpy as np
+from mindspore.nn.cell import Cell
+from mindspore.ops import operations as P

 __all__ = ['LSTM', 'LSTMCell']
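The unchanged `__all__` line is worth noting: it pins the module's public surface, so `from mindspore.nn.layer.lstm import *` exposes only `LSTM` and `LSTMCell` regardless of how many names the reordered imports pull in. A tiny illustration with a hypothetical module:

```python
# mymodule.py -- hypothetical module illustrating __all__
import math  # internal dependency, not re-exported

__all__ = ['area']

def area(radius):
    """Public API: the only name `from mymodule import *` exposes."""
    return math.pi * radius * radius

def _helper():
    """Private by convention, and excluded by __all__."""
```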
@@ -18,8 +18,8 @@
 from typing import Callable, List, Any

-import mindspore._c_expression as _c_expression
 import numpy as np
+import mindspore._c_expression as _c_expression

 from mindspore import ParameterTuple
 from mindspore import Tensor
@@ -22,9 +22,9 @@ import os
 import mindspore.common.dtype as mstype
 import mindspore.context as context
 from mindspore import Tensor
-from model_zoo.bert.src import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell, BertTrainOneStepWithLossScaleCell
 from mindspore.nn.optim import AdamWeightDecayDynamicLR
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
+from model_zoo.bert.src import BertConfig, BertNetworkWithLoss, BertTrainOneStepCell, BertTrainOneStepWithLossScaleCell

 from ...dataset_mock import MindData
 from ...ops_common import nn, np, batch_tuple_tensor, build_construct_graph
| @@ -91,10 +91,10 @@ class Net2(nn.Cell): | |||||
| self.all_reduce3 = P.AllReduce(self.op2, group=NCCL_WORLD_COMM_GROUP) | self.all_reduce3 = P.AllReduce(self.op2, group=NCCL_WORLD_COMM_GROUP) | ||||
| def construct(self): | def construct(self): | ||||
| x = self.all_reduce1(self.x1) | |||||
| y = self.all_reduce2(x) | |||||
| x_ = self.all_reduce1(self.x1) | |||||
| y = self.all_reduce2(x_) | |||||
| z = self.all_reduce3(y) | z = self.all_reduce3(y) | ||||
| return (x, y, z) | |||||
| return (x_, y, z) | |||||
| def test_AllReduce2(): | def test_AllReduce2(): | ||||
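The rename of the local `x` to `x_` looks like a lint fix: a local that reuses a name from an enclosing scope trips pylint's `redefined-outer-name` (W0621), presumably against a module-level `x` not shown in this hunk. A standalone sketch of the warning and the fix:

```python
# Hypothetical illustration of pylint W0621 (redefined-outer-name).
x = 10  # module-level name

def scaled():
    # A local named `x` here would shadow the module-level `x` and trip W0621;
    # renaming the local (x -> x_) keeps the two scopes distinct.
    x_ = 2 * x
    return x_

print(scaled())  # 20
```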
@@ -17,8 +17,11 @@
 import os
 import time

-import pytest
 import numpy as np
+import pytest
+
+from src.bert_for_pre_training import BertNetworkWithLoss, BertTrainOneStepWithLossScaleCell
+from src.bert_model import BertConfig

 import mindspore.common.dtype as mstype
 import mindspore.dataset.engine.datasets as de
@@ -26,8 +29,6 @@ import mindspore.dataset.transforms.c_transforms as C
 from mindspore import context
 from mindspore import log as logger
 from mindspore.common.tensor import Tensor
-from src.bert_model import BertConfig
-from src.bert_for_pre_training import BertNetworkWithLoss, BertTrainOneStepWithLossScaleCell
 from mindspore.nn.optim import Lamb
 from mindspore.train.callback import Callback
 from mindspore.train.loss_scale_manager import DynamicLossScaleManager
@@ -13,8 +13,9 @@
 # limitations under the License.
 # ============================================================================
-import numpy as np
 import os
+
+import numpy as np
 import pytest

 import mindspore.context as context
| @@ -397,5 +397,5 @@ def test_trainTensor_amp(num_classes=10, epoch=18, batch_size=16): | |||||
| loss = train_network(data, label) | loss = train_network(data, label) | ||||
| losses.append(loss) | losses.append(loss) | ||||
| assert (losses[-1][0].asnumpy() < 1) | assert (losses[-1][0].asnumpy() < 1) | ||||
| assert (losses[-1][1].asnumpy() == False) | |||||
| assert not losses[-1][1].asnumpy() | |||||
| assert (losses[-1][2].asnumpy() > 1) | assert (losses[-1][2].asnumpy() > 1) | ||||
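Replacing `== False` with `not` follows PEP 8 and pylint's `singleton-comparison` check; truthiness also behaves correctly for the NumPy boolean values that `.asnumpy()` returns here. A small sketch:

```python
import numpy as np

flag = np.bool_(False)

# Flagged by linters (pylint singleton-comparison, pycodestyle E712):
assert (flag == False)  # noqa: E712

# Preferred: rely on truthiness.
assert not flag
```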
| @@ -19,9 +19,8 @@ Usage: | |||||
| python test_network_main.py --net lenet --target Ascend | python test_network_main.py --net lenet --target Ascend | ||||
| """ | """ | ||||
| import argparse | import argparse | ||||
| import numpy as np | import numpy as np | ||||
| import os | |||||
| import time | |||||
| from models.alexnet import AlexNet | from models.alexnet import AlexNet | ||||
| from models.lenet import LeNet | from models.lenet import LeNet | ||||
| from models.resnetv1_5 import resnet50 | from models.resnetv1_5 import resnet50 | ||||
| @@ -35,10 +35,10 @@ def test_clip_by_norm_const(): | |||||
| super(Network, self).__init__() | super(Network, self).__init__() | ||||
| self.norm_value = Tensor(np.array([1]).astype(np.float32)) | self.norm_value = Tensor(np.array([1]).astype(np.float32)) | ||||
| self.clip = nn.ClipByNorm() | self.clip = nn.ClipByNorm() | ||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.clip(x, self.norm_value) | return self.clip(x, self.norm_value) | ||||
| net = Network() | net = Network() | ||||
| x = Tensor(np.array([[-2, 0, 0], [0, 3, 4]]).astype(np.float32)) | x = Tensor(np.array([[-2, 0, 0], [0, 3, 4]]).astype(np.float32)) | ||||
| output = net(x) | |||||
| net(x) | |||||
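Dropping the unused `output` binding clears pylint's `unused-variable` (W0612) while keeping the forward pass for its side effect. When a result is ignored on purpose, `_` is the conventional spelling:

```python
def forward(x):
    """Hypothetical stand-in for net(x): called for its side effect."""
    print("graph executed")
    return x * 2

forward(3)      # result intentionally discarded; no unused binding left behind
_ = forward(3)  # alternative: `_` signals a deliberately ignored value
```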
| @@ -14,6 +14,7 @@ | |||||
| # ============================================================================ | # ============================================================================ | ||||
| """ test ops """ | """ test ops """ | ||||
| import functools | import functools | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.nn as nn | import mindspore.nn as nn | ||||
| @@ -898,7 +899,7 @@ test_case_nn_ops = [ | |||||
| 'skip': ['backward']}), | 'skip': ['backward']}), | ||||
| ('BasicLSTMCell', { | ('BasicLSTMCell', { | ||||
| 'block': P.BasicLSTMCell(keep_prob=1.0, forget_bias=1.0, state_is_tuple=True, activation='tanh'), | 'block': P.BasicLSTMCell(keep_prob=1.0, forget_bias=1.0, state_is_tuple=True, activation='tanh'), | ||||
| 'desc_inputs': [[128, 128], [128, 128], [128, 128], [512, 256, 1, 1],[512, 1, 1, 1]], | |||||
| 'desc_inputs': [[128, 128], [128, 128], [128, 128], [512, 256, 1, 1], [512, 1, 1, 1]], | |||||
| 'desc_bprop': [[128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128]], | 'desc_bprop': [[128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128], [128, 128]], | ||||
| 'skip': []}), | 'skip': []}), | ||||
| ('TopK', { | ('TopK', { | ||||
| @@ -1324,7 +1325,7 @@ test_case_array_ops = [ | |||||
| 'desc_inputs': [(Tensor(np.array([1], np.float32)), | 'desc_inputs': [(Tensor(np.array([1], np.float32)), | ||||
| Tensor(np.array([1], np.float32)), | Tensor(np.array([1], np.float32)), | ||||
| Tensor(np.array([1], np.float32)))], | Tensor(np.array([1], np.float32)))], | ||||
| 'desc_bprop': [[3, ]]}), | |||||
| 'desc_bprop': [[3,]]}), | |||||
| ('Pack_0', { | ('Pack_0', { | ||||
| 'block': NetForPackInput(P.Pack()), | 'block': NetForPackInput(P.Pack()), | ||||
| 'desc_inputs': [[2, 2], [2, 2], [2, 2]], | 'desc_inputs': [[2, 2], [2, 2], [2, 2]], | ||||
@@ -1486,8 +1487,7 @@ test_case = functools.reduce(lambda x, y: x + y, test_case_lists)
 test_exec_case = test_case

-test_backward_exec_case = filter(lambda x: 'skip' not in x[1] or
-                                 'backward' not in x[1]['skip'], test_case)
+test_backward_exec_case = filter(lambda x: 'skip' not in x[1] or 'backward' not in x[1]['skip'], test_case)


 @non_graph_engine
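Joining the continuation keeps the `filter` call on a single line. Since `filter` returns a lazy iterator in Python 3, an equivalent list comprehension is often clearer; a sketch with simplified, hypothetical stand-ins for the test cases:

```python
# Simplified, hypothetical (name, config) test cases.
test_case = [
    ('Add', {'skip': []}),
    ('Sub', {'skip': ['backward']}),
    ('Mul', {}),
]

# The one-line filter() form; a lazy iterator in Python 3.
backward_cases = filter(lambda x: 'skip' not in x[1] or 'backward' not in x[1]['skip'], test_case)

# Equivalent eager list comprehension.
backward_list = [c for c in test_case if 'skip' not in c[1] or 'backward' not in c[1]['skip']]

assert list(backward_cases) == backward_list  # keeps 'Add' and 'Mul', drops 'Sub'
```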
| @@ -144,7 +144,7 @@ def test_op_forward_infererror(): | |||||
| input_np = np.random.randn(2, 3, 4, 5).astype(np.float32) | input_np = np.random.randn(2, 3, 4, 5).astype(np.float32) | ||||
| input_me = Tensor(input_np) | input_me = Tensor(input_np) | ||||
| net = Net3() | net = Net3() | ||||
| with pytest.raises(TypeError) as e: | |||||
| with pytest.raises(TypeError): | |||||
| net(input_me) | net(input_me) | ||||
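With no assertion made on the exception, the `as e` binding was dead weight (pylint `unused-variable`). The binding is worth keeping only when the test inspects the exception itself; a sketch with a hypothetical helper:

```python
import pytest

def parse_positive(s):
    """Hypothetical helper used only for this illustration."""
    value = int(s)
    if value <= 0:
        raise ValueError(f"expected a positive number, got {value}")
    return value

def test_parse_positive_rejects_zero():
    # Keep the binding only when the exception is inspected.
    with pytest.raises(ValueError) as excinfo:
        parse_positive("0")
    assert "expected a positive number" in str(excinfo.value)
```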
@@ -162,7 +162,7 @@ def test_sequential_resolve_error():
     input_np = np.random.randn(2, 3, 4, 5).astype(np.float32)
     input_me = Tensor(input_np)
     net = SequenceNet()
-    with pytest.raises(RuntimeError) as e:
+    with pytest.raises(RuntimeError):
         net(input_me)
@@ -19,7 +19,6 @@ import mindspore.nn as nn
 from mindspore import Tensor
 from mindspore import context
 from mindspore import dtype as mstype
-from mindspore.common.api import _executor

 context.set_context(device_target="Ascend")
| @@ -44,7 +44,7 @@ class ChooseInitParameterWithInput(nn.Cell): | |||||
| self.x = Parameter(Tensor(np.ones(2), dtype=mstype.int32), name='x') | self.x = Parameter(Tensor(np.ones(2), dtype=mstype.int32), name='x') | ||||
| @ms_function | @ms_function | ||||
| def construct(self, input): | |||||
| def construct(self, input_data): | |||||
| return self.x | return self.x | ||||
@@ -57,7 +57,7 @@ def test_choose_init_param():

 def test_choose_param_with_input():
     choose = ChooseInitParameterWithInput()
-    input = Tensor(np.zeros(2), dtype=mstype.int32)
+    input_data = Tensor(np.zeros(2), dtype=mstype.int32)
     expect = Tensor(np.ones(2), dtype=mstype.int32)
-    out = choose(input)
+    out = choose(input_data)
     assert np.allclose(expect.asnumpy(), out.asnumpy())
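Both hunks rename `input`, which shadows the builtin of the same name and triggers pylint's `redefined-builtin` (W0622). A standalone sketch of the problem and the fix:

```python
# Hypothetical illustration of pylint W0622 (redefined-builtin).
def normalize(input_data):  # fine: descriptive name, no builtin shadowed
    return [v / max(input_data) for v in input_data]

def normalize_bad(input):  # W0622: shadows the builtin input()
    # Within this body the real input() is unreachable.
    return [v / max(input) for v in input]

print(normalize([1, 2, 4]))  # [0.25, 0.5, 1.0]
```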
| @@ -1,10 +1,11 @@ | |||||
| import numpy as np | import numpy as np | ||||
| import mindspore.nn as nn | import mindspore.nn as nn | ||||
| import mindspore.ops.operations as P | import mindspore.ops.operations as P | ||||
| from mindspore.ops import composite as C | |||||
| from mindspore import context, Tensor, ParameterTuple | from mindspore import context, Tensor, ParameterTuple | ||||
| from mindspore.common.initializer import TruncatedNormal | from mindspore.common.initializer import TruncatedNormal | ||||
| from mindspore.nn import WithLossCell, Momentum | from mindspore.nn import WithLossCell, Momentum | ||||
| from mindspore.ops import composite as C | |||||
| context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") | context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU") | ||||
| @@ -45,7 +46,6 @@ class LeNet5(nn.Cell): | |||||
| Lenet network | Lenet network | ||||
| Args: | Args: | ||||
| num_class (int): Num classes. Default: 10. | num_class (int): Num classes. Default: 10. | ||||
| Returns: | Returns: | ||||
| Tensor, output tensor | Tensor, output tensor | ||||
| @@ -21,7 +21,6 @@ from mindspore import Tensor | |||||
| from mindspore import context | from mindspore import context | ||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| from mindspore.ops import composite as C | from mindspore.ops import composite as C | ||||
| from mindspore.ops import functional as F | |||||
| from mindspore.ops import operations as P | from mindspore.ops import operations as P | ||||
| from ....mindspore_test_framework.utils.bprop_util import bprop | from ....mindspore_test_framework.utils.bprop_util import bprop | ||||
| from ....mindspore_test_framework.utils.debug_util import PrintShapeTypeCell, PrintGradShapeTypeCell | from ....mindspore_test_framework.utils.debug_util import PrintShapeTypeCell, PrintGradShapeTypeCell | ||||
| @@ -19,9 +19,10 @@ | |||||
| @Desc : test parse the object's method | @Desc : test parse the object's method | ||||
| """ | """ | ||||
| import logging | import logging | ||||
| from dataclasses import dataclass | |||||
| import numpy as np | import numpy as np | ||||
| import pytest | import pytest | ||||
| from dataclasses import dataclass | |||||
| import mindspore.nn as nn | import mindspore.nn as nn | ||||
| from mindspore import context | from mindspore import context | ||||
| @@ -19,9 +19,7 @@ import mindspore.nn as nn | |||||
| from mindspore import context | from mindspore import context | ||||
| from mindspore.common.tensor import Tensor | from mindspore.common.tensor import Tensor | ||||
| from mindspore.nn import WithGradCell, WithLossCell | from mindspore.nn import WithGradCell, WithLossCell | ||||
| from mindspore.nn.optim import Momentum | |||||
| from mindspore.ops import operations as P | from mindspore.ops import operations as P | ||||
| from mindspore.train.model import Model | |||||
| from ..ut_filter import non_graph_engine | from ..ut_filter import non_graph_engine | ||||
| @@ -13,14 +13,9 @@ | |||||
| # limitations under the License. | # limitations under the License. | ||||
| # ============================================================================ | # ============================================================================ | ||||
| """ tests for quant """ | """ tests for quant """ | ||||
| import numpy as np | |||||
| from mobilenetv2_combined import MobileNetV2 | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore import Tensor | |||||
| from mindspore import nn | from mindspore import nn | ||||
| from mindspore.nn.layer import combined | from mindspore.nn.layer import combined | ||||
| from mindspore.train.quant import quant as qat | |||||
| context.set_context(mode=context.GRAPH_MODE, device_target="GPU") | context.set_context(mode=context.GRAPH_MODE, device_target="GPU") | ||||
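This hunk drops imports the quant test no longer uses; pylint (`unused-import`, W0611) and flake8 (F401) both flag these. If an apparently unused import must stay, for instance as a deliberate re-export, the usual escape hatch is an explicit suppression (illustrative only):

```python
# Hypothetical: keeping an intentionally "unused" import without lint noise.
from os import path  # noqa: F401  (flake8)
# or, for pylint:
# from os import path  # pylint: disable=unused-import
```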
| @@ -74,7 +74,7 @@ class LossNet(nn.Cell): | |||||
| return out | return out | ||||
| def test_Model_Checkpoint_prefix_invalid(): | |||||
| def test_model_checkpoint_prefix_invalid(): | |||||
| """Test ModelCheckpoint prefix invalid.""" | """Test ModelCheckpoint prefix invalid.""" | ||||
| with pytest.raises(ValueError): | with pytest.raises(ValueError): | ||||
| ModelCheckpoint(123) | ModelCheckpoint(123) | ||||
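The new name satisfies PEP 8 and pylint's `invalid-name` (C0103: functions are snake_case) while still matching pytest's default `test_*` discovery pattern, so the test keeps running. A sketch of what collection is sensitive to:

```python
# Hypothetical module showing pytest's default name-based discovery.
def test_model_checkpoint_prefix_invalid():  # collected; snake_case satisfies pylint
    assert True

def test_Model_Checkpoint_prefix_invalid():  # also collected, but pylint C0103 flags the name
    assert True

def check_prefix_invalid():  # NOT collected: name doesn't match test_*
    assert True
```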
| @@ -338,9 +338,9 @@ def test_step_end_save_graph(): | |||||
| ckpoint_cb.begin(run_context) | ckpoint_cb.begin(run_context) | ||||
| # import pdb;pdb.set_trace() | # import pdb;pdb.set_trace() | ||||
| ckpoint_cb.step_end(run_context) | ckpoint_cb.step_end(run_context) | ||||
| assert os.path.exists('./test_files/test-graph.meta') == True | |||||
| assert os.path.exists('./test_files/test-graph.meta') | |||||
| if os.path.exists('./test_files/test-graph.meta'): | if os.path.exists('./test_files/test-graph.meta'): | ||||
| os.chmod('./test_files/test-graph.meta', stat.S_IWRITE) | os.chmod('./test_files/test-graph.meta', stat.S_IWRITE) | ||||
| os.remove('./test_files/test-graph.meta') | os.remove('./test_files/test-graph.meta') | ||||
| ckpoint_cb.step_end(run_context) | ckpoint_cb.step_end(run_context) | ||||
| assert os.path.exists('./test_files/test-graph.meta') == False | |||||
| assert not os.path.exists('./test_files/test-graph.meta') | |||||
@@ -71,16 +71,16 @@ def setup_module():
 def test_save_graph():
     """ test_exec_save_graph """
-    class Net(nn.Cell):
+    class Net1(nn.Cell):
         def __init__(self):
-            super(Net, self).__init__()
+            super(Net1, self).__init__()
             self.add = P.TensorAdd()

         def construct(self, x, y):
             z = self.add(x, y)
             return z

-    net = Net()
+    net = Net1()
     net.set_train()
     out_me_list = []
     x = Tensor(np.random.rand(2, 1, 2, 3).astype(np.float32))
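Renaming the nested helper to `Net1` presumably avoids colliding with other `Net` classes defined elsewhere in the same test module. Python classes are ordinary name bindings, so a second `class Net` silently rebinds the name, as this sketch shows:

```python
class Net:
    tag = "first"

class Net:  # rebinds the name; the first definition is no longer reachable as Net
    tag = "second"

print(Net.tag)  # prints "second", hence distinct names (Net1, Net2, ...) in shared scopes
```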