@@ -1,44 +1,44 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(nn.Cell):
    """Wraps ApplyMomentum together with its parameters so it runs as a standalone graph."""

    def __init__(self):
        super(Net, self).__init__()
        self.apply_momentum = P.ApplyMomentum(gradient_scale=1024.0)
        self.variable = Parameter(initializer(
            'normal', [2, 3, 3, 4]), name='variable')
        self.accumulation = Parameter(initializer(
            'normal', [2, 3, 3, 4]), name='accumulation')
        self.learning_rate = Parameter(initializer(
            'normal', [1, ]), name='learning_rate')
        self.gradient = Parameter(initializer(
            'normal', [2, 3, 3, 4]), name='gradient')
        self.momentum = Parameter(initializer(
            'normal', [1, ]), name='momentum')

    def construct(self):
        return self.apply_momentum(self.variable, self.accumulation,
                                   self.learning_rate, self.gradient, self.momentum)


def test_net():
    """Smoke test: the op should execute on Ascend and return the updated variable."""
    apply_momentum = Net()
    output = apply_momentum()
    print(output.asnumpy())
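# --- Hedged reference sketch (added; not part of the original test) ---------
# ApplyMomentum is expected to follow the conventional momentum update
#   accum <- accum * momentum + grad,  var <- var - lr * accum;
# the NumPy helper below only illustrates that formula. How gradient_scale
# factors into the Ascend kernel is not asserted here and is left as an
# assumption.
def _numpy_momentum_update(var, accum, lr, grad, momentum):
    accum = accum * momentum + grad
    var = var - lr * accum
    return var, accum
# Scalar sanity check of the rule:
#   _numpy_momentum_update(1.0, 0.0, 0.1, 0.5, 0.9) -> (0.95, 0.5)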
@@ -1,42 +1,42 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter

context.set_context(device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.bias_add_grad = G.BiasAddGrad()
        # self.dout = Parameter(initializer(
        #     'normal', [2, 3, 3, 4]), name='dout')

    @ms_function
    def construct(self, dout):
        return self.bias_add_grad(dout)


dout = np.ones([2, 3, 4, 4]).astype(np.float32)
bias_add_grad = Net()
output = bias_add_grad(Tensor(dout))
expect_output = np.array([32., 32., 32.]).astype(np.float32)
assert np.all(output.asnumpy() == expect_output), "bias_add_grad execute failed, please check current code commit"
print(output.asnumpy())
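# --- Hedged note on the expected value (added) -------------------------------
# BiasAddGrad reduces the incoming gradient over every axis except the
# channel axis, so for the all-ones dout of shape [2, 3, 4, 4] (NCHW) each of
# the 3 channel entries is 2 * 4 * 4 = 32, which is where expect_output
# comes from. NumPy equivalent, assuming channel axis 1:
#   np.ones([2, 3, 4, 4], np.float32).sum(axis=(0, 2, 3))  # -> [32., 32., 32.]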
@@ -1,39 +1,39 @@
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.ops.operations import _grad_ops as G
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter

context.set_context(device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.bias_add_grad = G.BiasAddGrad()

    @ms_function
    def construct(self, dout):
        return self.bias_add_grad(dout)


def test_net():
    dout = np.random.rand(1, 1001).astype(np.float32)
    bias_add_grad = Net()
    # Wrap the NumPy array in a Tensor before feeding it to the cell.
    output = bias_add_grad(Tensor(dout))
    print(output.asnumpy())
@@ -1,44 +1,44 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
import numpy as np
import mindspore.context as context

context.set_context(mode=context.GRAPH_MODE,
                    device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.mask = P.DropoutGenMask(10, 28)
        self.shape = P.Shape()

    def construct(self, x, y):
        shape_x = self.shape(x)
        return self.mask(shape_x, y)


x = np.ones([2, 4, 2, 2]).astype(np.int32)
y = np.array([1.0]).astype(np.float32)


def test_net():
    mask = Net()
    tx, ty = Tensor(x), Tensor(y)
    output = mask(tx, ty)
    print(output.asnumpy())
    assert ([255, 255, 255, 255] == output.asnumpy()).all()
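# --- Hedged note (added): why the assertion checks 255 -----------------------
# The second input (y = 1.0) is the keep probability, so every one of the
# 2 * 4 * 2 * 2 = 32 elements is kept. The mask comes back as packed bits,
# and 32 fully set bits read as four uint8 bytes of 255, the values the
# assert compares against. (The packed-bit layout is inferred from the
# expected output, not from the operator's documentation.)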
@@ -1,51 +1,51 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
from mindspore.ops import operations as P
from mindspore.nn import Cell
from mindspore.common.tensor import Tensor
from mindspore.train.model import Model
from mindspore import log as logger
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Greater(Cell):
    def __init__(self):
        super(Greater, self).__init__()
        self.greater = P.Greater()

    def construct(self, inputa, inputb):
        return self.greater(inputa, inputb)


def me_greater(inputa, inputb):
    net = Greater()
    net.set_train()
    model = Model(net)
    out = model.predict(inputa, inputb)
    logger.info("Check input a: ")
    logger.info(inputa)
    logger.info("Check input b: ")
    logger.info(inputb)
    return out.asnumpy()


@pytest.mark.ssd_tbe
def test_greater_2d_scalar0():
    a = np.random.randint(-5, 5, [8, 32]).astype(np.int32)
    b = np.random.randint(-5, 5, [8, 32]).astype(np.int32)
    out_me = me_greater(Tensor(a), Tensor(b))
    logger.info("Check me result:")
    logger.info(out_me)
@@ -1,55 +1,55 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.nn import LayerNorm
from mindspore.common.tensor import Tensor
from mindspore.nn import Cell
from mindspore.train.model import Model
from mindspore import log as logger
import pytest
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(Cell):
    def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta):
        super(Net, self).__init__()
        self.layernorm = LayerNorm(input_shape, begin_norm_axis, begin_params_axis, gamma, beta)

    def construct(self, input):
        x = self.layernorm(input)
        return x


def pt_me_layernorm(input_data, normalized_shape, gamma, beta, axis):
    net = Net(normalized_shape, begin_norm_axis=axis,
              begin_params_axis=axis,
              gamma=Tensor(gamma),
              beta=Tensor(beta))
    net.set_train()
    model = Model(net)
    out_me = model.predict(Tensor(input_data))
    logger.info("Check me result:")
    logger.info(out_me.asnumpy())


@pytest.mark.lower_bs
def test_normal_layernorm_1_128_1024_axis_2():
    """
    input [1, 128, 1024], normalized_shape=(1024,), begin_norm_axis=2
    """
    input_data = np.random.randn(1, 128, 1024).astype(np.float32)
    gamma = np.random.randn(1024).astype(np.float32)
    gamma.fill(1.1)
    beta = np.random.randn(1024).astype(np.float32)
    beta.fill(0.1)
    pt_me_layernorm(input_data, (1024, ), gamma, beta, 2)
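# --- Hedged NumPy reference (added) ---------------------------------------------
# With begin_norm_axis=2 the statistics are taken over the trailing 1024
# elements of each [1, 128, 1024] row. A rough oracle, assuming the usual
# LayerNorm definition (the epsilon value here is an assumption):
def _numpy_layernorm(x, gamma, beta, eps=1e-7):
    mean = x.mean(axis=-1, keepdims=True)
    var = x.var(axis=-1, keepdims=True)
    return gamma * (x - mean) / np.sqrt(var + eps) + beta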
@@ -1,65 +1,65 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.nn import LayerNorm
from mindspore.common.tensor import Tensor
from mindspore.nn import Cell
from mindspore.ops.composite import GradOperation
from mindspore import log as logger
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Grad(Cell):
    def __init__(self, network):
        super(Grad, self).__init__()
        self.grad = GradOperation(name="get_all", get_all=True, sens_param=True)
        self.network = network

    def construct(self, input, output_grad):
        gout = self.grad(self.network)(input, output_grad)
        return gout


class Net(Cell):
    def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta):
        super(Net, self).__init__()
        self.layernorm = LayerNorm(input_shape, begin_norm_axis, begin_params_axis, gamma, beta)

    def construct(self, input):
        x = self.layernorm(input)
        return x


def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradients):
    input_me = Tensor(input_data)
    net_me = Grad(Net(normalized_shape, begin_norm_axis=axis,
                      begin_params_axis=axis,
                      gamma=Tensor(gamma),
                      beta=Tensor(beta)))
    net_me.set_train()
    out_pool_grad_me = Tensor(gradients)
    out_grad = net_me(input_me, out_pool_grad_me)
    logger.info("Check me result:")
    logger.info(out_grad.asnumpy())


def test_normal_layernorm_grad_normalize_2d():
    """
    input [1, 128, 1024], normalized_shape=(1024,), begin_norm_axis=2
    """
    input_data = np.ones([1, 128, 1024]).astype(np.float32)
    gradients = np.ones((1, 128, 1024)).astype(np.float32)
    gamma = np.random.randn(1024).astype(np.float32)
    gamma.fill(1.1)
    beta = np.random.randn(1024).astype(np.float32)
    beta.fill(0.1)
    py_me_layernorm_grad(input_data, (1024,), gamma, beta, 2, gradients)
@@ -1,39 +1,39 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context

context.set_context(device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.logical_and = P.LogicalAnd()

    @ms_function
    def construct(self, x1, x2):
        return self.logical_and(x1, x2)


x1 = [True, True, False, False, True, True, False, False]
x2 = [True, False, False, True, True, False, False, True]


def test_net():
    logical_and = Net()
    output = logical_and(Tensor(x1), Tensor(x2))
    print(x1)
    print(x2)
    print(output.asnumpy())
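# --- Hedged NumPy oracle (added) ----------------------------------------------
# For the boolean lists above the operator output should match
#   np.logical_and(x1, x2)
#   # -> [True, False, False, False, True, False, False, False]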
@@ -1,38 +1,38 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context

context.set_context(device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.logical_not = P.LogicalNot()

    @ms_function
    def construct(self, x1):
        return self.logical_not(x1)


x1 = [True, True, False, False, True, True, False, False]


def test_net():
    logical_not = Net()
    output = logical_not(Tensor(x1))
    print(x1)
    print(output.asnumpy())
@@ -1,39 +1,39 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore import Tensor
from mindspore.ops import operations as P
import mindspore.nn as nn
from mindspore.common.api import ms_function
import numpy as np
import mindspore.context as context

context.set_context(device_target="Ascend")


class Net(nn.Cell):
    def __init__(self):
        super(Net, self).__init__()
        self.logical_or = P.LogicalOr()

    @ms_function
    def construct(self, x1, x2):
        return self.logical_or(x1, x2)


x1 = [True, True, False, False, True, True, False, False]
x2 = [True, False, False, True, True, False, False, True]


def test_net():
    logical_or = Net()
    output = logical_or(Tensor(x1), Tensor(x2))
    print(x1)
    print(x2)
    print(output.asnumpy())