@@ -20,18 +20,23 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 context.set_context(enable_task_sink=True)
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.add = P.TensorAdd()
     def construct(self, x, y):
         return self.add(x, y)
-x = np.ones([1,3,3,4]).astype(np.float32)
-y = np.ones([1,3,3,4]).astype(np.float32)
+x = np.ones([1, 3, 3, 4]).astype(np.float32)
+y = np.ones([1, 3, 3, 4]).astype(np.float32)
 def test_net():
     add = Net()
@@ -20,15 +20,19 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.add = P.AddN()
     def construct(self, x, y):
         return self.add((x, y))
 def test_net():
     x = np.random.randn(1, 3, 3, 4).astype(np.float32)
     y = np.random.randn(1, 3, 3, 4).astype(np.float32)
@@ -18,97 +18,110 @@ import mindspore.nn as nn
 from mindspore.common.api import ms_function
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.expand_dims = P.ExpandDims()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.expand_dims = P.ExpandDims()
-  def construct(self, tensor, dim):
-    return self.expand_dims(tensor, dim)
+    def construct(self, tensor, dim):
+        return self.expand_dims(tensor, dim)
 def test_net_bool():
-  x = np.random.randn(1, 16, 1, 1).astype(np.bool)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.bool)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_int8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int8)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_uint8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_int16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int16)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_uint16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_int32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int32)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_uint32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_int64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int64)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_uint64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_float16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_float32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float32)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
 def test_net_float64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float64)
-  net = Net()
-  output = net(Tensor(x), -1)
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.expand_dims(x, -1)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float64)
+    net = Net()
+    output = net(Tensor(x), -1)
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.expand_dims(x, -1)))
@@ -17,83 +17,94 @@ from mindspore.ops import operations as P
 import mindspore.nn as nn
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.flatten = P.Flatten()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.flatten = P.Flatten()
-  def construct(self, tensor):
-    return self.flatten(tensor)
+    def construct(self, tensor):
+        return self.flatten(tensor)
 def test_net_int8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_uint8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_int16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_uint16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_int32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_uint32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_int64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_uint64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_float16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
 def test_net_float32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.flatten()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.flatten()))
@@ -18,97 +18,110 @@ import mindspore.nn as nn
 from mindspore.common.api import ms_function
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.isfinite = P.IsFinite()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.isfinite = P.IsFinite()
-  def construct(self, tensor):
-    return self.isfinite(tensor)
+    def construct(self, tensor):
+        return self.isfinite(tensor)
 def test_net_bool():
-  x = np.random.randn(1, 16, 1, 1).astype(np.bool)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.bool)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_int8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_uint8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_int16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_uint16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_int32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_uint32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_int64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_uint64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_float16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_float32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
 def test_net_float64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.isfinite(x)))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.isfinite(x)))
@@ -18,97 +18,110 @@ import mindspore.nn as nn
 from mindspore.common.api import ms_function
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.reshape = P.Reshape()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.reshape = P.Reshape()
-  def construct(self, tensor):
-    return self.reshape(tensor, (4,4))
+    def construct(self, tensor):
+        return self.reshape(tensor, (4, 4))
 def test_net_bool():
-  x = np.random.randn(1, 16, 1, 1).astype(np.bool)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.bool)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_int8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_uint8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_int16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_uint16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_int32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_uint32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_int64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_uint64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_float16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_float32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
 def test_net_float64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == np.reshape(x, (4,4))))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == np.reshape(x, (4, 4))))
@@ -17,97 +17,110 @@ from mindspore.ops import operations as P
 import mindspore.nn as nn
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.PYNATIVE_MODE, device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.squeeze = P.Squeeze()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.squeeze = P.Squeeze()
-  def construct(self, tensor):
-    return self.squeeze(tensor)
+    def construct(self, tensor):
+        return self.squeeze(tensor)
 def test_net_bool():
-  x = np.random.randn(1, 16, 1, 1).astype(np.bool)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.bool)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_int8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_uint8():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_int16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_uint16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_int32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_uint32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_int64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.int64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_uint64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.uint64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_float16():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_float32():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float32)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
 def test_net_float64():
-  x = np.random.randn(1, 16, 1, 1).astype(np.float64)
-  net = Net()
-  output = net(Tensor(x))
-  print(output.asnumpy())
-  assert(np.all(output.asnumpy() == x.squeeze()))
+    x = np.random.randn(1, 16, 1, 1).astype(np.float64)
+    net = Net()
+    output = net(Tensor(x))
+    print(output.asnumpy())
+    assert (np.all(output.asnumpy() == x.squeeze()))
@@ -20,24 +20,29 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.apply_momentum = P.ApplyMomentum(gradient_scale=1024.0)
         self.variable = Parameter(initializer(
-        'normal', [2, 3, 3, 4]), name='variable')
+            'normal', [2, 3, 3, 4]), name='variable')
         self.accumulation = Parameter(initializer(
-        'normal', [2, 3, 3, 4]), name='accumulation')
+            'normal', [2, 3, 3, 4]), name='accumulation')
         self.learning_rate = Parameter(initializer(
-        'normal', [1, ]), name='learning_rate')
+            'normal', [1, ]), name='learning_rate')
         self.gradient = Parameter(initializer(
-        'normal', [2, 3, 3, 4]), name='gradient')
+            'normal', [2, 3, 3, 4]), name='gradient')
         self.momentum = Parameter(initializer(
-        'normal', [1, ]), name='momentum')
+            'normal', [1, ]), name='momentum')
     def construct(self):
         return self.apply_momentum(self.variable, self.accumulation, self.learning_rate, self.gradient, self.momentum)
 def test_net():
     apply_momentum = Net()
     output = apply_momentum()
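Note: the hunk above only exercises the kernel launch; nothing checks the numerics. For reference, a minimal NumPy sketch of the update ApplyMomentum is expected to perform (assuming the usual accumulator rule; the helper below is ours, not MindSpore API):

    import numpy as np

    def apply_momentum(var, accum, lr, grad, momentum):
        # accumulate a running descent direction, then step along it
        accum = accum * momentum + grad
        var = var - lr * accum
        return var, accum

    var, accum = apply_momentum(np.zeros(3, np.float32), np.zeros(3, np.float32),
                                0.1, np.ones(3, np.float32), 0.9)
    print(var)  # [-0.1 -0.1 -0.1]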
@@ -21,22 +21,25 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.bias_add_grad = G.BiasAddGrad()
-    #self.dout = Parameter(initializer(
-    #'normal', [2, 3, 3, 4]), name='dout')
+    def __init__(self):
+        super(Net, self).__init__()
+        self.bias_add_grad = G.BiasAddGrad()
+        # self.dout = Parameter(initializer(
+        # 'normal', [2, 3, 3, 4]), name='dout')
-  @ms_function
-  def construct(self, dout):
-    return self.bias_add_grad(dout)
+    @ms_function
+    def construct(self, dout):
+        return self.bias_add_grad(dout)
-dout = np.ones([2,3,4,4]).astype(np.float32)
+dout = np.ones([2, 3, 4, 4]).astype(np.float32)
 bias_add_grad = Net()
 output = bias_add_grad(Tensor(dout))
-expect_output = np.array([32.,32.,32.]).astype(np.float32)
-assert np.all(output.asnumpy()==expect_output), "bias_add_grad execute failed, please check current code commit"
+expect_output = np.array([32., 32., 32.]).astype(np.float32)
+assert np.all(output.asnumpy() == expect_output), "bias_add_grad execute failed, please check current code commit"
 print(output.asnumpy())
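The hard-coded expect_output above follows from the reduction BiasAddGrad performs: for an all-ones gradient of shape [2, 3, 4, 4] in NCHW, each of the 3 bias elements sums 2 * 4 * 4 = 32 values. A quick NumPy cross-check of that arithmetic (our own sketch, independent of the test):

    import numpy as np

    dout = np.ones([2, 3, 4, 4], np.float32)
    # the bias gradient reduces over every axis except the channel axis
    print(dout.sum(axis=(0, 2, 3)))  # [32. 32. 32.]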
@@ -21,17 +21,20 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.bias_add_grad = G.BiasAddGrad()
     @ms_function
     def construct(self, dout):
         return self.bias_add_grad(dout)
 def test_net():
     dout = np.random.rand(1, 1001).astype(np.float32)
     bias_add_grad = Net()
@@ -20,32 +20,33 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         out_channel = 64
         kernel_size = 7
         self.conv = P.Conv2D(out_channel,
-            kernel_size,
-            mode=1,
-            pad_mode="valid",
-            pad=0,
-            stride=1,
-            dilation=1,
-            group=1)
+                             kernel_size,
+                             mode=1,
+                             pad_mode="valid",
+                             pad=0,
+                             stride=1,
+                             dilation=1,
+                             group=1)
         self.w = Parameter(initializer(
-        'normal', [64, 3, 7, 7]), name='w')
+            'normal', [64, 3, 7, 7]), name='w')
     @ms_function
     def construct(self, x):
         return self.conv(x, self.w)
 def test_net():
-    x = np.random.randn(32,3,224,224).astype(np.float32)
+    x = np.random.randn(32, 3, 224, 224).astype(np.float32)
     conv = Net()
     output = conv(Tensor(x))
     print(output.asnumpy())
@@ -21,37 +21,40 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
-  def __init__(self):
-    super(Net, self).__init__()
-    self.conv2d_grad = G.Conv2DBackpropFilter(4,1)
-    yt = Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32))
-    self.y = Parameter(yt, name='y')
-    self.get_shape = P.Shape()
+    def __init__(self):
+        super(Net, self).__init__()
+        self.conv2d_grad = G.Conv2DBackpropFilter(4, 1)
+        yt = Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32))
+        self.y = Parameter(yt, name='y')
+        self.get_shape = P.Shape()
-  @ms_function
-  def construct(self, x, out):
-    return self.conv2d_grad(out, x, self.get_shape(self.y))
+    @ms_function
+    def construct(self, x, out):
+        return self.conv2d_grad(out, x, self.get_shape(self.y))
 x = Tensor(np.array([[[
-  [3, 0, 1, 2, 7, 4],
-  [1, 5, 8, 9, 3, 1],
-  [2, 7, 2, 5, 1, 3],
-  [0, 1, 3, 1, 7, 8],
-  [4, 2, 1, 6, 2, 8],
-  [2, 4, 5, 2, 3, 9]]]]).astype(np.float32))
+    [3, 0, 1, 2, 7, 4],
+    [1, 5, 8, 9, 3, 1],
+    [2, 7, 2, 5, 1, 3],
+    [0, 1, 3, 1, 7, 8],
+    [4, 2, 1, 6, 2, 8],
+    [2, 4, 5, 2, 3, 9]]]]).astype(np.float32))
 out = Tensor(np.array([[[
-  [ -5, -4, 0, 8],
-  [-10, -2, 2, 3],
-  [ 0, -2, -4, -7],
-  [ -3, -2, -3, -16]]]]).astype(np.float32))
+    [-5, -4, 0, 8],
+    [-10, -2, 2, 3],
+    [0, -2, -4, -7],
+    [-3, -2, -3, -16]]]]).astype(np.float32))
 operator = Net()
 output = operator(x, out)
-expect_out = np.array([[[[ -60., -142., -265.],[-104., -211., -322.],[-102., -144., -248.]]]]).astype(np.float32)
+expect_out = np.array([[[[-60., -142., -265.], [-104., -211., -322.], [-102., -144., -248.]]]]).astype(np.float32)
 print(output.asnumpy())
 print(expect_out)
-assert np.all(output.asnumpy()==expect_out), "conv2d_grad execute failed, please check current code commit"
+assert np.all(output.asnumpy() == expect_out), "conv2d_grad execute failed, please check current code commit"
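The expect_out constant can be reproduced by hand: for this stride-1 valid convolution, the filter gradient is the correlation of the input with the output gradient, dW[i, j] = sum over p, q of x[p + i, q + j] * dout[p, q]. A NumPy sketch of that check (ours, outside the test):

    import numpy as np

    x = np.array([[3, 0, 1, 2, 7, 4],
                  [1, 5, 8, 9, 3, 1],
                  [2, 7, 2, 5, 1, 3],
                  [0, 1, 3, 1, 7, 8],
                  [4, 2, 1, 6, 2, 8],
                  [2, 4, 5, 2, 3, 9]], np.float32)
    dout = np.array([[-5, -4, 0, 8],
                     [-10, -2, 2, 3],
                     [0, -2, -4, -7],
                     [-3, -2, -3, -16]], np.float32)
    # slide the 4x4 output-gradient window over the 6x6 input
    dw = np.array([[np.sum(x[i:i + 4, j:j + 4] * dout) for j in range(3)]
                   for i in range(3)], np.float32)
    print(dw)  # [[-60. -142. -265.], [-104. -211. -322.], [-102. -144. -248.]]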
@@ -21,8 +21,10 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.ops.composite import GradOperation
 context.set_context(device_target="Ascend")
 class Grad(nn.Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -33,26 +35,28 @@ class Grad(nn.Cell):
     def construct(self, input, output_grad):
         return self.grad(self.network)(input, output_grad)
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         out_channel = 512
         kernel_size = 2048
         self.conv = P.Conv2D(out_channel,
-            (kernel_size, kernel_size),
-            mode=1,
-            pad_mode="same",
-            pad=3,
-            stride=2,
-            dilation=1,
-            group=1)
+                             (kernel_size, kernel_size),
+                             mode=1,
+                             pad_mode="same",
+                             pad=3,
+                             stride=2,
+                             dilation=1,
+                             group=1)
         self.w = Parameter(initializer(
-        'normal', [512, 2048, 1, 1]), name='w')
+            'normal', [512, 2048, 1, 1]), name='w')
     @ms_function
     def construct(self, x):
         return self.conv(x, self.w)
 def test_net():
     x = np.ones([32, 2048, 7, 7]).astype(np.float32)
     sens = np.ones([32, 512, 7, 7]).astype(np.float32)
@@ -20,7 +20,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -30,6 +33,7 @@ class Net(nn.Cell):
     def construct(self, x):
         return self.dense(x)
 def test_net():
     x = np.random.randn(32, 2048).astype(np.float32)
     net = Net()
@@ -21,8 +21,10 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.ops.composite import GradOperation
 context.set_context(device_target="Ascend")
 class Grad(nn.Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -33,6 +35,7 @@ class Grad(nn.Cell):
     def construct(self, input, output_grad):
         return self.grad(self.network)(input, output_grad)
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -41,6 +44,7 @@ class Net(nn.Cell):
     def construct(self, x):
         return self.dense(x)
 def test_net():
     x = np.random.randn(32, 2048).astype(np.float32)
     sens = np.random.randn(32, 1001).astype(np.float32)
@@ -17,6 +17,7 @@ from mindspore.ops import operations as P
 import mindspore.nn as nn
 import numpy as np
 import mindspore.context as context
 context.set_context(mode=context.GRAPH_MODE,
                     device_target="Ascend")
@@ -21,6 +21,7 @@ import mindspore.context as context
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -20,7 +20,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -35,7 +38,7 @@ class Net(nn.Cell):
 def test_net():
-    x = np.random.randn(1,64,112,112).astype(np.float32)
+    x = np.random.randn(1, 64, 112, 112).astype(np.float32)
     # mean = np.random.randn(1,16,1,1).astype(np.float32)
     # variance = np.random.randn(1,16,1,1).astype(np.float32)
     fusedBn = Net()
@@ -45,4 +48,3 @@ def test_net():
     print("***********output y*********")
     print(output.asnumpy())
@@ -21,8 +21,11 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.ops.composite import GradOperation
-#context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+# context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 context.set_context(device_target="Ascend")
 class Grad(nn.Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -33,6 +36,7 @@ class Grad(nn.Cell):
     def construct(self, input, output_grad):
         return self.grad(self.network)(input, output_grad)
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -47,8 +51,8 @@ class Net(nn.Cell):
 def test_net():
-    x = np.random.randn(1,64,112,112).astype(np.float32)
-    sens = np.random.randn(1,64,112,112).astype(np.float32)
+    x = np.random.randn(1, 64, 112, 112).astype(np.float32)
+    sens = np.random.randn(1, 64, 112, 112).astype(np.float32)
     net = Grad(Net())
     output = net(Tensor(x), Tensor(sens))
     print("***********x*********")
@@ -20,6 +20,8 @@ from mindspore import Tensor
 from mindspore.common.api import ms_function
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -31,32 +33,32 @@ class Net(nn.Cell):
 def test_image_gradients():
-    image = Tensor(np.array([[[[1,2],[3,4]]]]), dtype=mstype.int32)
-    expected_dy = np.array([[[[2,2],[0,0]]]]).astype(np.int32)
-    expected_dx = np.array([[[[1,0],[1,0]]]]).astype(np.int32)
+    image = Tensor(np.array([[[[1, 2], [3, 4]]]]), dtype=mstype.int32)
+    expected_dy = np.array([[[[2, 2], [0, 0]]]]).astype(np.int32)
+    expected_dx = np.array([[[[1, 0], [1, 0]]]]).astype(np.int32)
     net = Net()
     dy, dx = net(image)
-    assert np.any(dx.asnumpy()-expected_dx) == False
-    assert np.any(dy.asnumpy()-expected_dy) == False
+    assert np.any(dx.asnumpy() - expected_dx) == False
+    assert np.any(dy.asnumpy() - expected_dy) == False
 def test_image_gradients_multi_channel_depth():
     # 4 x 2 x 2 x 2
     dtype = mstype.int32
-    image = Tensor(np.array([[[[1,2],[3,4]], [[5,6],[7,8]]],
-                             [[[3,5],[7,9]], [[11,13],[15,17]]],
-                             [[[5,10],[15,20]], [[25,30],[35,40]]],
-                             [[[10,20],[30,40]], [[50,60],[70,80]]]]), dtype=dtype)
-    expected_dy = Tensor(np.array([[[[2,2],[0,0]], [[2,2],[0,0]]],
-                                   [[[4,4],[0,0]], [[4,4],[0,0]]],
-                                   [[[10,10],[0,0]], [[10,10],[0,0]]],
-                                   [[[20,20],[0,0]], [[20,20],[0,0]]]]), dtype=dtype)
-    expected_dx = Tensor(np.array([[[[1,0],[1,0]], [[1,0],[1,0]]],
-                                   [[[2,0],[2,0]], [[2,0],[2,0]]],
-                                   [[[5,0],[5,0]], [[5,0],[5,0]]],
-                                   [[[10,0],[10,0]], [[10,0],[10,0]]]]), dtype=dtype)
+    image = Tensor(np.array([[[[1, 2], [3, 4]], [[5, 6], [7, 8]]],
+                             [[[3, 5], [7, 9]], [[11, 13], [15, 17]]],
+                             [[[5, 10], [15, 20]], [[25, 30], [35, 40]]],
+                             [[[10, 20], [30, 40]], [[50, 60], [70, 80]]]]), dtype=dtype)
+    expected_dy = Tensor(np.array([[[[2, 2], [0, 0]], [[2, 2], [0, 0]]],
+                                   [[[4, 4], [0, 0]], [[4, 4], [0, 0]]],
+                                   [[[10, 10], [0, 0]], [[10, 10], [0, 0]]],
+                                   [[[20, 20], [0, 0]], [[20, 20], [0, 0]]]]), dtype=dtype)
+    expected_dx = Tensor(np.array([[[[1, 0], [1, 0]], [[1, 0], [1, 0]]],
+                                   [[[2, 0], [2, 0]], [[2, 0], [2, 0]]],
+                                   [[[5, 0], [5, 0]], [[5, 0], [5, 0]]],
+                                   [[[10, 0], [10, 0]], [[10, 0], [10, 0]]]]), dtype=dtype)
     net = Net()
     dy, dx = net(image)
-    assert np.any(dx.asnumpy()-expected_dx.asnumpy()) == False
-    assert np.any(dy.asnumpy()-expected_dy.asnumpy()) == False
+    assert np.any(dx.asnumpy() - expected_dx.asnumpy()) == False
+    assert np.any(dy.asnumpy() - expected_dy.asnumpy()) == False
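The expected tensors above encode the operator's definition: dy is the forward difference along height and dx along width, each zero-padded on the last row or column so the output keeps the input shape. A NumPy sketch of that rule for NCHW inputs (our own helper, not the MindSpore API):

    import numpy as np

    def image_gradients(img):
        dy = np.zeros_like(img)
        dy[:, :, :-1, :] = img[:, :, 1:, :] - img[:, :, :-1, :]  # diff along H
        dx = np.zeros_like(img)
        dx[:, :, :, :-1] = img[:, :, :, 1:] - img[:, :, :, :-1]  # diff along W
        return dy, dx

    dy, dx = image_gradients(np.array([[[[1, 2], [3, 4]]]], np.int32))
    print(dy.tolist())  # [[[[2, 2], [0, 0]]]]
    print(dx.tolist())  # [[[[1, 0], [1, 0]]]]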
@@ -20,7 +20,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 context.set_context(device_target="Ascend")
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -30,8 +33,10 @@ class Net(nn.Cell):
     def construct(self, x1, x2):
         return self.matmul(x1, x2)
-x1 = np.random.randn(1,3).astype(np.float32)
-x2 = np.random.randn(3,4).astype(np.float32)
+x1 = np.random.randn(1, 3).astype(np.float32)
+x2 = np.random.randn(3, 4).astype(np.float32)
 def test_net():
     matmul = Net()
| @@ -20,12 +20,13 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.maxpool = P.MaxPool(pad_mode="SAME", window=3, stride=2) | self.maxpool = P.MaxPool(pad_mode="SAME", window=3, stride=2) | ||||
| @ms_function | @ms_function | ||||
| def construct(self, x): | def construct(self, x): | ||||
| output = self.maxpool(x) | output = self.maxpool(x) | ||||
| @@ -33,7 +34,7 @@ class Net(nn.Cell): | |||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(32,64,112,112).astype(np.float32) | |||||
| x = np.random.randn(32, 64, 112, 112).astype(np.float32) | |||||
| maxpool = Net() | maxpool = Net() | ||||
| output = maxpool(Tensor(x)) | output = maxpool(Tensor(x)) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -19,6 +19,7 @@ from mindspore.common.api import ms_function | |||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| @@ -21,8 +21,10 @@ import mindspore.context as context | |||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Grad(nn.Cell): | class Grad(nn.Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -33,6 +35,7 @@ class Grad(nn.Cell): | |||||
| def construct(self, input, output_grad): | def construct(self, input, output_grad): | ||||
| return self.grad(self.network)(input, output_grad) | return self.grad(self.network)(input, output_grad) | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -43,8 +46,9 @@ class Net(nn.Cell): | |||||
| @ms_function | @ms_function | ||||
| def construct(self, x): | def construct(self, x): | ||||
| output = self.maxpool(x) | |||||
| return output[0] | |||||
| output = self.maxpool(x) | |||||
| return output[0] | |||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(32, 64, 112, 112).astype(np.float32) | x = np.random.randn(32, 64, 112, 112).astype(np.float32) | ||||
| @@ -20,7 +20,10 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -30,8 +33,9 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.relu(x) | return self.relu(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| x = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| relu = Net() | relu = Net() | ||||
| output = relu(Tensor(x)) | output = relu(Tensor(x)) | ||||
| print(x) | print(x) | ||||
| @@ -21,8 +21,10 @@ import mindspore.context as context | |||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Grad(nn.Cell): | class Grad(nn.Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -33,6 +35,7 @@ class Grad(nn.Cell): | |||||
| def construct(self, input, output_grad): | def construct(self, input, output_grad): | ||||
| return self.grad(self.network)(input, output_grad) | return self.grad(self.network)(input, output_grad) | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -41,9 +44,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.relu(x) | return self.relu(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| sens = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| x = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| sens = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| net = Grad(Net()) | net = Grad(Net()) | ||||
| output = net(Tensor(x), Tensor(sens)) | output = net(Tensor(x), Tensor(sens)) | ||||
| print(len(output)) | print(len(output)) | ||||
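The `Grad` wrapper that keeps reappearing in these hunks is how the tests obtain input gradients: it wraps the forward cell in `GradOperation` with `sens_param=True` so the test can feed in an upstream gradient. A minimal self-contained sketch of the pattern (ReLU stands in for the op under test; shapes are illustrative):

    import numpy as np
    import mindspore.nn as nn
    from mindspore.common.tensor import Tensor
    from mindspore.ops import operations as P
    from mindspore.ops.composite import GradOperation

    class Grad(nn.Cell):
        def __init__(self, network):
            super(Grad, self).__init__()
            # get_all returns a gradient per network input; sens_param feeds dL/dy in.
            self.grad = GradOperation(name="get_all", get_all=True, sens_param=True)
            self.network = network

        def construct(self, input, output_grad):
            return self.grad(self.network)(input, output_grad)

    class ReluNet(nn.Cell):
        def __init__(self):
            super(ReluNet, self).__init__()
            self.relu = P.ReLU()

        def construct(self, x):
            return self.relu(x)

    x = np.random.randn(2, 3, 3, 4).astype(np.float32)
    sens = np.ones([2, 3, 3, 4]).astype(np.float32)
    # For ReLU the result is sens masked to the positions where x > 0.
    dx = Grad(ReluNet())(Tensor(x), Tensor(sens))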
@@ -18,18 +18,22 @@ import mindspore.nn as nn
 from mindspore.common.api import ms_function
 import numpy as np
 import mindspore.context as context
+
 context.set_context(device_target="Ascend")
+
 class Net(nn.Cell):
-    def __init__(self):
-        super(Net, self).__init__()
-        self.reshape = P.Reshape()
-    @ms_function
-    def construct(self, tensor):
-        return self.reshape(tensor, (1,16))
+    def __init__(self):
+        super(Net, self).__init__()
+        self.reshape = P.Reshape()
+
+    @ms_function
+    def construct(self, tensor):
+        return self.reshape(tensor, (1, 16))
+
 def test_net():
-    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
-    reshape = Net()
-    output = reshape(Tensor(x))
-    print(output.asnumpy())
+    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
+    reshape = Net()
+    output = reshape(Tensor(x))
+    print(output.asnumpy())

@@ -20,7 +20,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(device_target="Ascend")
+
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -29,7 +32,8 @@ class Net(nn.Cell):
     @ms_function
     def construct(self, x):
         return self.simplemean(x, (-2, -1))
+
 def test_net():
     x = np.random.randn(32, 2048, 7, 7).astype(np.float32)
     simplemean = Net()

@@ -21,8 +21,10 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.ops.composite import GradOperation
+
 context.set_context(device_target="Ascend")
+
 class Grad(nn.Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -33,6 +35,7 @@ class Grad(nn.Cell):
     def construct(self, input, output_grad):
         return self.grad(self.network)(input, output_grad)
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -41,9 +44,10 @@ class Net(nn.Cell):
     def construct(self, x):
         return self.simplemean(x, (-2, -1))
+
 def test_net():
-    x = np.random.randn(32,2048,7,7).astype(np.float32)
-    sens = np.random.randn(32,2048, 1, 1).astype(np.float32)
+    x = np.random.randn(32, 2048, 7, 7).astype(np.float32)
+    sens = np.random.randn(32, 2048, 1, 1).astype(np.float32)
     net = Grad(Net())
     output = net(Tensor(x), Tensor(sens))
     print(output.asnumpy())

@@ -18,6 +18,7 @@ import mindspore.nn as nn
 from mindspore.common.api import ms_function
 import numpy as np
 import mindspore.context as context
+
 context.set_context(device_target="Ascend")
@@ -30,9 +31,10 @@ class Net(nn.Cell):
     def construct(self, features, labels):
         return self.SparseSoftmaxCrossEntropyWithLogits(features, labels)
+
 def np_sparse_softmax_cross_entropy_with_logits(labels_shape, logits_shape, logits_dtype):
     num_class = logits_shape[1]
-    labels = np.random.randint(low=0, high=num_class - 1, size=labels_shape).astype(np.int32)
+    labels = np.random.randint(low=0, high=num_class - 1, size=labels_shape).astype(np.int32)
     logits = np.random.rand(*logits_shape).astype(logits_dtype)
     features = logits
     features_reshape = np.reshape(features, [-1, num_class])
@@ -48,7 +50,7 @@ def np_sparse_softmax_cross_entropy_with_logits(labels_shape, logits_shape, logits_dtype):
     loss = -np.sum(labels_mat * np.log(probs + 1.0e-20), axis=1)
     bp_res = np.reshape(bp, features.shape)
     loss_res = np.reshape(loss, labels.shape)
-    loss_res = np.sum(loss_res, axis=0)/loss_res.shape[0]
+    loss_res = np.sum(loss_res, axis=0) / loss_res.shape[0]
     return labels, logits, loss_res, bp_res
@@ -65,4 +67,6 @@ def test_net():
     print(loss_me.asnumpy().flatten())
     print("-------------------------")
     print(expect)
+
+
 test_net()
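For orientation, the NumPy reference above implements ordinary sparse softmax cross-entropy: for logits z and integer label y, loss = -log(softmax(z)_y), averaged over the batch. A compact restatement (function and variable names here are illustrative, not from the diff):

    import numpy as np

    def sparse_softmax_xent(logits, labels):
        # logits: (N, C) float array; labels: (N,) integer class indices.
        shifted = logits - logits.max(axis=1, keepdims=True)  # numerical stability
        log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
        # Negative log-probability of the correct class, averaged over the batch.
        return -log_probs[np.arange(labels.shape[0]), labels].mean()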
@@ -21,6 +21,7 @@ import mindspore.context as context
 context.set_context(device_target="Ascend")
+
 class Net(nn.Cell):
     def __init__(self, is_grad=False):
         super(Net, self).__init__()

@@ -20,11 +20,13 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(nn.Cell):
     """Net definition"""

     def __init__(self):
         super(Net, self).__init__()
         self.AssignAdd = P.AssignAdd()
@@ -39,8 +41,8 @@ class Net(nn.Cell):
 def test_net():
     """test AssignAdd"""
     net = Net()
-    x = Tensor(np.ones([1]).astype(np.float32)*100)
+    x = Tensor(np.ones([1]).astype(np.float32) * 100)
     print("MyPrintResult dataX:", x)
     result = net(x)
-    print("MyPrintResult data::", result.asnumpy())
\ No newline at end of file
+    print("MyPrintResult data::", result.asnumpy())

@@ -20,11 +20,13 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(nn.Cell):
     """Net definition"""

     def __init__(self):
         super(Net, self).__init__()
         self.AssignSub = P.AssignSub()
@@ -39,8 +41,8 @@ class Net(nn.Cell):
 def test_net():
     """test AssignSub"""
     net = Net()
-    x = Tensor(np.ones([1]).astype(np.int32)*100)
+    x = Tensor(np.ones([1]).astype(np.int32) * 100)
     print("MyPrintResult dataX:", x)
     result = net(x)
-    print("MyPrintResult data::", result.asnumpy())
\ No newline at end of file
+    print("MyPrintResult data::", result.asnumpy())

@@ -20,7 +20,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(device_target="Ascend")
+
+
 class Net(nn.Cell):
     def __init__(self, keep_dims, axis):
         super(Net, self).__init__()
@@ -31,8 +34,10 @@ class Net(nn.Cell):
     def construct(self, inputs):
         return self.reduce_mean(inputs, self.axis)
+
 x1 = np.random.randn(64).astype(np.float32)
+
 def test_net():
     keepdims = False
     axis = -1

@@ -21,6 +21,7 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -29,8 +30,9 @@ class Net(nn.Cell):
     def construct(self, x, y):
         return self.add(x, y)
-x = np.random.randn(1,3,3,4).astype(np.float32)
-y = np.random.randn(1,3,3,4).astype(np.float32)
+
+x = np.random.randn(1, 3, 3, 4).astype(np.float32)
+y = np.random.randn(1, 3, 3, 4).astype(np.float32)

 def test_net():

@@ -20,15 +20,19 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.add = P.AddN()
+
     def construct(self, x, y):
         return self.add((x, y))
+
 def test_net():
     x = np.random.randn(1, 3, 3, 4).astype(np.float32)
     y = np.random.randn(1, 3, 3, 4).astype(np.float32)

@@ -19,6 +19,7 @@ from mindspore.nn import Dense, SoftmaxCrossEntropyWithLogits
 from mindspore.nn import TrainOneStepCell, WithLossCell
 import mindspore.context as context
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend", impl_type="tbe")
 context.set_context(enable_task_sink=True)
@@ -44,16 +45,16 @@ class Adam:
         label = Tensor(label_np_onehot)
         ms_dense = Dense(in_channels=self.input_channels,
-                        out_channels=self.output_channels,
-                        weight_init=weight_np,
-                        bias_init=bias, has_bias=True)
+                         out_channels=self.output_channels,
+                         weight_init=weight_np,
+                         bias_init=bias, has_bias=True)
         criterion = SoftmaxCrossEntropyWithLogits()
         optimizer = nn.Adam(ms_dense.trainable_params(),
-                           learning_rate=1e-3,
-                           beta1=0.9, beta2=0.999, eps=self.epsilon,
-                           use_locking=False,
-                           use_nesterov=False, weight_decay=0.0,
-                           loss_scale=1.0)
+                            learning_rate=1e-3,
+                            beta1=0.9, beta2=0.999, eps=self.epsilon,
+                            use_locking=False,
+                            use_nesterov=False, weight_decay=0.0,
+                            loss_scale=1.0)
         net_with_criterion = WithLossCell(ms_dense, criterion)
         train_network = TrainOneStepCell(net_with_criterion, optimizer)
@@ -68,5 +69,5 @@ class Adam:
 def test_adam():
-    fact = Adam(batch_num=8, input_channels=20, output_channels=5, epoch=5, lr=0.1, weight_decay=0.0, epsilon= 1e-8)
+    fact = Adam(batch_num=8, input_channels=20, output_channels=5, epoch=5, lr=0.1, weight_decay=0.0, epsilon=1e-8)
     fact.train_mindspore_impl()
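For readers skimming the Adam hunk: the test assembles a trainable graph with the usual MindSpore wiring, a loss cell around the layer and a one-step training cell around that. A condensed sketch under the same imports as the hunk (input and label values are illustrative):

    import numpy as np
    import mindspore.nn as nn
    from mindspore.common.tensor import Tensor
    from mindspore.nn import Dense, SoftmaxCrossEntropyWithLogits
    from mindspore.nn import TrainOneStepCell, WithLossCell

    net = Dense(in_channels=20, out_channels=5, has_bias=True)
    criterion = SoftmaxCrossEntropyWithLogits()
    optimizer = nn.Adam(net.trainable_params(), learning_rate=1e-3)
    net_with_criterion = WithLossCell(net, criterion)                # forward + loss
    train_network = TrainOneStepCell(net_with_criterion, optimizer)  # + backward + update
    train_network.set_train()

    inputs = Tensor(np.random.randn(8, 20).astype(np.float32))
    label = Tensor(np.eye(5)[np.random.randint(0, 5, 8)].astype(np.float32))  # one-hot
    loss = train_network(inputs, label)  # one optimization step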
@@ -21,23 +21,26 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         self.apply_momentum = P.ApplyMomentum(gradient_scale=1024.0)
         self.variable = Parameter(initializer(
-           'normal', [2, 3, 3, 4]), name='variable')
+            'normal', [2, 3, 3, 4]), name='variable')
         self.accumulation = Parameter(initializer(
-           'normal', [2, 3, 3, 4]), name='accumulation')
+            'normal', [2, 3, 3, 4]), name='accumulation')
         self.learning_rate = Parameter(initializer(
-           'normal', [1, ]), name='learning_rate')
+            'normal', [1, ]), name='learning_rate')
         self.gradient = Parameter(initializer(
-           'normal', [2, 3, 3, 4]), name='gradient')
+            'normal', [2, 3, 3, 4]), name='gradient')
         self.momentum = Parameter(initializer(
-           'normal', [1, ]), name='momentum')
+            'normal', [1, ]), name='momentum')
+
     def construct(self):
         return self.apply_momentum(self.variable, self.accumulation, self.learning_rate, self.gradient, self.momentum)
+
 def test_net():
     context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
     apply_momentum = Net()

@@ -19,8 +19,10 @@ from mindspore.nn import Cell
 from mindspore.train.model import Model
 import pytest
 from mindspore import context
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -30,17 +32,20 @@ class Net(Cell):
         x = self.batchmatmul(inputa, inputb)
         return x
+
 def tf_me_batchmatmul(inputa, inputb):
     net = Net()
     net.set_train()
     model = Model(net)
     out_me = model.predict(Tensor(inputa), Tensor(inputb))
+
 def test_batchmatmul_normal_shape1():
     inputa = np.random.randn(128, 16, 128).astype(np.float32)
     inputb = np.random.randn(128, 128, 64).astype(np.float32)
     tf_me_batchmatmul(Tensor(inputa), Tensor(inputb))
+
 def test_batchmatmul_normal_shape2():
     inputa = np.random.randn(1, 16, 128, 128).astype(np.float32)
     inputb = np.random.randn(1, 16, 128, 64).astype(np.float32)
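Shape bookkeeping for the two BatchMatMul cases above is easy to sanity-check in plain NumPy, since np.matmul batches over leading dimensions the same way (a sketch, not part of the diff):

    import numpy as np

    a = np.random.randn(128, 16, 128).astype(np.float32)
    b = np.random.randn(128, 128, 64).astype(np.float32)
    assert np.matmul(a, b).shape == (128, 16, 64)    # batch of 128 (16x128) @ (128x64)

    a = np.random.randn(1, 16, 128, 128).astype(np.float32)
    b = np.random.randn(1, 16, 128, 64).astype(np.float32)
    assert np.matmul(a, b).shape == (1, 16, 128, 64)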
@@ -21,6 +21,7 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -35,7 +36,7 @@ class Net(nn.Cell):
 def test_net():
-    x = np.random.randn(1,64,112,112).astype(np.float32)
+    x = np.random.randn(1, 64, 112, 112).astype(np.float32)
     # mean = np.random.randn(1,16,1,1).astype(np.float32)
     # variance = np.random.randn(1,16,1,1).astype(np.float32)
     context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -55,4 +56,3 @@ def test_net():
     print("***********output y*********")
     print(output.asnumpy())
-

@@ -21,8 +21,11 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
 from mindspore.ops.composite import GradOperation
-#context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
+# context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
 context.set_context(device_target="Ascend")
+
 class Grad(nn.Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -48,7 +51,7 @@ class Net(nn.Cell):
 def test_net():
-    x = np.random.randn(1,64,112,112).astype(np.float32)
-    sens = np.random.randn(1,64,112,112).astype(np.float32)
+    x = np.random.randn(1, 64, 112, 112).astype(np.float32)
+    sens = np.random.randn(1, 64, 112, 112).astype(np.float32)
     net = Grad(Net())
     output = net(Tensor(x), Tensor(sens))

@@ -20,11 +20,13 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(nn.Cell):
     """Net definition"""

     def __init__(self,
                  output_channels,
                  bias_init='zeros',
@@ -51,4 +53,3 @@ def test_compile():
     # enable it when staging function is ready
     output = net(input_data)
     print(output.asnumpy())
-

@@ -21,7 +21,10 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
+
 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -31,6 +34,7 @@ class Net(nn.Cell):
     def construct(self, dout):
         return self.bias_add_grad(dout)
+
 def test_net():
     dout = np.random.rand(1, 1001).astype(np.float32)
     bias_add_grad = Net()

@@ -20,11 +20,12 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

 class Net(nn.Cell):
-    def __init__( self):
+    def __init__(self):
         super(Net, self).__init__()
         self.cat = P.Concat(axis=1)
@@ -46,4 +47,4 @@ def test_net():
     print(np.arange(2 * 2).reshape(2, 2))
     print(np.arange(2 * 3).reshape(2, 3))
     print(output)
-    assert(output.asnumpy() == expect).all()
+    assert (output.asnumpy() == expect).all()
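One nuance behind the `assert` spacing fix above: `assert(expr).all()` and `assert (expr).all()` parse identically, because `assert` is a statement rather than a function, so the change is purely stylistic. The convention matters mostly because writing `assert` like a call invites the classic always-true tuple mistake (sketch):

    import numpy as np

    a = np.array([1, 2])
    b = np.array([1, 2])
    assert (a == b).all()  # correct: asserts the boolean returned by .all()
    # assert ((a == b).all(), "mismatch")   # wrong: a non-empty tuple is always
    #                                       # truthy, so this can never fail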
@@ -21,31 +21,30 @@ import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter

 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
         out_channel = 64
         kernel_size = 7
         self.conv = P.Conv2D(out_channel,
-                            kernel_size,
-                            mode=1,
-                            pad_mode="valid",
-                            pad=0,
-                            stride=1,
-                            dilation=1,
-                            group=1)
+                             kernel_size,
+                             mode=1,
+                             pad_mode="valid",
+                             pad=0,
+                             stride=1,
+                             dilation=1,
+                             group=1)
         self.w = Parameter(initializer(
-           'normal', [64, 3, 7, 7]), name='w')
+            'normal', [64, 3, 7, 7]), name='w')

     @ms_function
     def construct(self, x):
         return self.conv(x, self.w)

 def test_net():
-    x = np.random.randn(32,3,224,224).astype(np.float32)
+    x = np.random.randn(32, 3, 224, 224).astype(np.float32)
     context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
     conv = Net()
     output = conv(Tensor(x))

@@ -21,6 +21,7 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(device_target='Ascend')
@@ -37,19 +38,21 @@ class Net(nn.Cell):
                              stride=1,
                              dilation=1,
                              group=1)
-        self.w = Parameter(initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='w')
+        self.w = Parameter(
+            initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]),
+            name='w')
         self.x = Parameter(initializer(Tensor(np.array([[[
             [3, 0, 1, 2, 7, 4],
             [1, 5, 8, 9, 3, 1],
             [2, 7, 2, 5, 1, 3],
             [0, 1, 3, 1, 7, 8],
             [4, 2, 1, 6, 2, 8],
-            [2, 4, 5, 2, 3, 9]]]]).astype(np.float32)), [1,1,6,6]), name='x')
+            [2, 4, 5, 2, 3, 9]]]]).astype(np.float32)), [1, 1, 6, 6]), name='x')
         self.out = Parameter(initializer(Tensor(np.array([[[
-            [ -5, -4, 0, 8],
-            [-10, -2, 2, 3],
-            [ 0, -2, -4, -7],
-            [ -3, -2, -3, -16]]]]).astype(np.float32)),[1,1,4,4]), name='y')
+            [-5, -4, 0, 8],
+            [-10, -2, 2, 3],
+            [0, -2, -4, -7],
+            [-3, -2, -3, -16]]]]).astype(np.float32)), [1, 1, 4, 4]), name='y')
         self.get_shape = P.Shape()

     @ms_function
@@ -67,7 +70,7 @@ def test_conv2d_backprop_filter():
     [-104, -211, -322]
     [-102, -144, -248]]]]
     """
-    expect = np.array([[[[ -60, -142, -265],
+    expect = np.array([[[[-60, -142, -265],
                         [-104, -211, -322],
                         [-102, -144, -248]]]]).astype(np.float32)
     print(output)

@@ -20,6 +20,7 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(device_target="Ascend")
@@ -36,19 +37,21 @@ class Net(nn.Cell):
                              stride=1,
                              dilation=1,
                              group=1)
-        self.w = Parameter(initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]), name='w')
+        self.w = Parameter(
+            initializer(Tensor(np.array([[[[1, 0, -1], [1, 0, -1], [1, 0, -1]]]]).astype(np.float32)), [1, 1, 3, 3]),
+            name='w')
         self.x = Parameter(initializer(Tensor(np.array([[[
             [3, 0, 1, 2, 7, 4],
             [1, 5, 8, 9, 3, 1],
             [2, 7, 2, 5, 1, 3],
             [0, 1, 3, 1, 7, 8],
             [4, 2, 1, 6, 2, 8],
-            [2, 4, 5, 2, 3, 9]]]]).astype(np.float32)), [1,1,6,6]), name='x')
+            [2, 4, 5, 2, 3, 9]]]]).astype(np.float32)), [1, 1, 6, 6]), name='x')
         self.out = Parameter(initializer(Tensor(np.array([[[
-            [ -5, -4, 0, 8],
-            [-10, -2, 2, 3],
-            [ 0, -2, -4, -7],
-            [ -3, -2, -3, -16]]]]).astype(np.float32)),[1,1,4,4]), name='y')
+            [-5, -4, 0, 8],
+            [-10, -2, 2, 3],
+            [0, -2, -4, -7],
+            [-3, -2, -3, -16]]]]).astype(np.float32)), [1, 1, 4, 4]), name='y')
         self.get_shape = P.Shape()

     @ms_function
@@ -69,11 +72,11 @@ def test_conv2d_backprop_input():
     [ -3, -4, -4, -19, 7, 23]
     [ -3, -2, 0, -14, 3, 16]]]]
     """
-    expect = np.array([[[[ -5, -4, 5, 12, 0, -8],
-                        [-15, -6, 17, 17, -2, -11],
-                        [-15, -8, 13, 12, 2, -4],
-                        [-13, -6, 8, -14, 5, 20],
-                        [ -3, -4, -4, -19, 7, 23],
-                        [ -3, -2, 0, -14, 3, 16]]]]).astype(np.float32)
+    expect = np.array([[[[-5, -4, 5, 12, 0, -8],
+                        [-15, -6, 17, 17, -2, -11],
+                        [-15, -8, 13, 12, 2, -4],
+                        [-13, -6, 8, -14, 5, 20],
+                        [-3, -4, -4, -19, 7, 23],
+                        [-3, -2, 0, -14, 3, 16]]]]).astype(np.float32)
     print(output)
     assert (output.asnumpy() == expect).all()
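The hard-coded expectations in both Conv2D backprop tests can be reproduced offline. For a stride-1 "valid" cross-correlation forward pass, the filter gradient is the valid cross-correlation of the input with the output gradient, and the input gradient is the full convolution of the output gradient with the filter. A sketch with SciPy (scipy is an assumption here, it is not imported by these tests; single channel, batch dims dropped):

    import numpy as np
    from scipy.signal import convolve2d, correlate2d

    x = np.array([[3, 0, 1, 2, 7, 4],
                  [1, 5, 8, 9, 3, 1],
                  [2, 7, 2, 5, 1, 3],
                  [0, 1, 3, 1, 7, 8],
                  [4, 2, 1, 6, 2, 8],
                  [2, 4, 5, 2, 3, 9]], dtype=np.float32)
    w = np.array([[1, 0, -1], [1, 0, -1], [1, 0, -1]], dtype=np.float32)
    dout = np.array([[-5, -4, 0, 8],
                     [-10, -2, 2, 3],
                     [0, -2, -4, -7],
                     [-3, -2, -3, -16]], dtype=np.float32)

    dw = correlate2d(x, dout, mode='valid')  # 3x3, matches the backprop_filter expect
    dx = convolve2d(dout, w, mode='full')    # 6x6, matches the backprop_input expect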
@@ -20,9 +20,11 @@ import numpy as np
 import mindspore.context as context
 from mindspore.common.initializer import initializer
 from mindspore.common.parameter import Parameter
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 from mindspore import log as logger

 class Net(nn.Cell):
     def __init__(self):
         super(Net, self).__init__()
@@ -33,7 +35,7 @@ class Net(nn.Cell):
 def test_net():
-    x = np.random.randn(2,5,8).astype(np.float32)
+    x = np.random.randn(2, 5, 8).astype(np.float32)
     mask = np.random.randn(16).astype(np.uint8)
     keep_prob = 1
@@ -48,4 +50,3 @@ def test_net():
     logger.info("***********output y*********")
     logger.info(output.asnumpy())
-

@@ -21,6 +21,7 @@ import math
 import pytest
 from mindspore import context
 from mindspore import log as logger
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
@@ -52,6 +53,7 @@ def test_gelu_input_dim_0():
     with pytest.raises(ValueError):
         gelu_forward_cmp(input_shape)
+
 def test_gelu_input_dim_10240_1024():
     input_shape = [10240, 1024]
     gelu_forward_cmp(input_shape)
@@ -96,6 +98,7 @@ def test_gelu_input_dim_128_4096():
     input_shape = [128, 4096]
     gelu_forward_cmp(input_shape)
+
 @pytest.mark.lower_bs
 def test_gelu_input_dim_160_1024():
     input_shape = [160, 1024]

@@ -25,6 +25,7 @@ from mindspore import log as logger
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

 class Grad(Cell):
     def __init__(self, network):
         super(Grad, self).__init__()
@@ -55,6 +56,7 @@ def gelu_backward_cmp(input_shape):
     logger.info("---------me--------")
     logger.info(output_grad_me)
+
 # ---------- LARGE INPUT ---------------
 class MEGeluLargeIn(Cell):
@@ -67,6 +69,7 @@ class MEGeluLargeIn(Cell):
         x = self.matmul(x1, x2)
         return self.gelu(x)
+
 class GradLargeIn(Cell):
     def __init__(self, network):
         super(GradLargeIn, self).__init__()
@@ -86,5 +89,5 @@ def gelu_backward_me_large_in_impl(x1, x2, output_grad):
 def test_grad_gelu_input_10240_1024():
-    input_shape = [10240,1024]
+    input_shape = [10240, 1024]
     gelu_backward_cmp(input_shape)
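Background for the GELU hunks: GELU(x) = x * Phi(x), with Phi the standard normal CDF; the tanh approximation is the common implementation. A NumPy reference sketch (whether the Ascend kernel under test uses the exact erf form or the tanh form is not visible in this diff):

    import numpy as np

    def gelu_ref(x):
        # tanh approximation of x * Phi(x)
        return 0.5 * x * (1.0 + np.tanh(np.sqrt(2.0 / np.pi)
                                        * (x + 0.044715 * np.power(x, 3))))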
@@ -20,8 +20,10 @@ from mindspore.common.tensor import Tensor
 from mindspore.train.model import Model
 from mindspore import log as logger
 from mindspore import context
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Greater(Cell):
     def __init__(self):
         super(Greater, self).__init__()
@@ -30,6 +32,7 @@ class Greater(Cell):
     def construct(self, inputa, inputb):
         return self.greater(inputa, inputb)
+
 def me_greater(inputa, inputb):
     net = Greater()
     net.set_train()
@@ -42,10 +45,11 @@ def me_greater(inputa, inputb):
     logger.info(inputb)
     return out.asnumpy()
+
 @pytest.mark.ssd_tbe
 def test_greater_2d_scalar0():
     a = np.random.randint(-5, 5, [8, 32]).astype(np.int32)
     b = np.random.randint(-5, 5, [8, 32]).astype(np.int32)
     out_me = me_greater(Tensor(a), Tensor(b))
     logger.info("Check me result:")
-    logger.info(out_me)
\ No newline at end of file
+    logger.info(out_me)

@@ -20,8 +20,10 @@ from mindspore.train.model import Model
 from mindspore import log as logger
 import pytest
 from mindspore import context
+
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
+
 class Net(Cell):
     def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta):
         super(Net, self).__init__()
@@ -31,6 +33,7 @@ class Net(Cell):
         x = self.layernorm(input)
         return x
+
 def pt_me_layernorm(input_data, normalized_shape, gamma, beta, axis):
     net = Net(normalized_shape, begin_norm_axis=axis,
               begin_params_axis=axis,
@@ -42,6 +45,7 @@ def pt_me_layernorm(input_data, normalized_shape, gamma, beta, axis):
     logger.info("Check me result:")
     logger.info(out_me.asnumpy())
+
 @pytest.mark.lower_bs
 def test_normal_layernorm_1_128_1024_axis_2():
     """
@@ -52,4 +56,4 @@ def test_normal_layernorm_1_128_1024_axis_2():
     gamma.fill(1.1)
     beta = np.random.randn(1024).astype(np.float32)
     beta.fill(0.1)
-    pt_me_layernorm(input_data, (1024, ), gamma, beta, 2)
+    pt_me_layernorm(input_data, (1024,), gamma, beta, 2)
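What the LayerNorm test computes, in plain terms: normalize over the axes from begin_norm_axis onward, then apply the affine gamma/beta. A NumPy reference for the [1, 128, 1024], axis=2 case exercised above (the eps value is an assumption; the test fills gamma with 1.1 and beta with 0.1):

    import numpy as np

    def layernorm_ref(x, gamma, beta, eps=1e-7):
        mean = x.mean(axis=-1, keepdims=True)   # begin_norm_axis=2 -> last axis
        var = x.var(axis=-1, keepdims=True)
        return gamma * (x - mean) / np.sqrt(var + eps) + beta

    x = np.random.randn(1, 128, 1024).astype(np.float32)
    gamma = np.full(1024, 1.1, dtype=np.float32)
    beta = np.full(1024, 0.1, dtype=np.float32)
    y = layernorm_ref(x, gamma, beta)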
| @@ -19,18 +19,21 @@ from mindspore.nn import Cell | |||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| from mindspore import log as logger | from mindspore import log as logger | ||||
| from mindspore import context | from mindspore import context | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Grad(Cell): | class Grad(Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| self.grad = GradOperation(name="get_all", get_all=True, sens_param=True) | self.grad = GradOperation(name="get_all", get_all=True, sens_param=True) | ||||
| self.network = network | self.network = network | ||||
| def construct(self, input, output_grad,): | |||||
| def construct(self, input, output_grad, ): | |||||
| gout = self.grad(self.network)(input, output_grad) | gout = self.grad(self.network)(input, output_grad) | ||||
| return gout | return gout | ||||
| class Net(Cell): | class Net(Cell): | ||||
| def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta): | def __init__(self, input_shape, begin_norm_axis, begin_params_axis, gamma, beta): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -40,6 +43,7 @@ class Net(Cell): | |||||
| x = self.layernorm(input) | x = self.layernorm(input) | ||||
| return x | return x | ||||
| def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradients): | def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradients): | ||||
| input_me = Tensor(input_data) | input_me = Tensor(input_data) | ||||
| net_me = Grad(Net(normalized_shape, begin_norm_axis=axis, | net_me = Grad(Net(normalized_shape, begin_norm_axis=axis, | ||||
| @@ -52,6 +56,7 @@ def py_me_layernorm_grad(input_data, normalized_shape, gamma, beta, axis, gradie | |||||
| logger.info("Check me result:") | logger.info("Check me result:") | ||||
| logger.info(out_grad.asnumpy()) | logger.info(out_grad.asnumpy()) | ||||
| def test_normal_layernorm_grad_normalize_2d(): | def test_normal_layernorm_grad_normalize_2d(): | ||||
| """ | """ | ||||
| 1 input[1, 128, 1024],normalized_shape=[1024],element_affine=False | 1 input[1, 128, 1024],normalized_shape=[1024],element_affine=False | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,8 +31,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.less(x1, x2) | return self.less(x1, x2) | ||||
| x1 = np.random.randn(3,4).astype(np.float16) | |||||
| x2 = np.random.randn(3,4).astype(np.float16) | |||||
| x1 = np.random.randn(3, 4).astype(np.float16) | |||||
| x2 = np.random.randn(3, 4).astype(np.float16) | |||||
| def test_net(): | def test_net(): | ||||
| less = Net() | less = Net() | ||||
| @@ -37,4 +42,3 @@ def test_net(): | |||||
| print(x1) | print(x1) | ||||
| print(x2) | print(x2) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,8 +31,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.less_equal(x1, x2) | return self.less_equal(x1, x2) | ||||
| x1 = np.random.randn(3,4).astype(np.float16) | |||||
| x2 = np.random.randn(3,4).astype(np.float16) | |||||
| x1 = np.random.randn(3, 4).astype(np.float16) | |||||
| x2 = np.random.randn(3, 4).astype(np.float16) | |||||
| def test_net(): | def test_net(): | ||||
| less_equal = Net() | less_equal = Net() | ||||
| @@ -37,4 +42,3 @@ def test_net(): | |||||
| print(x1) | print(x1) | ||||
| print(x2) | print(x2) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,12 +31,14 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.logical_and(x1, x2) | return self.logical_and(x1, x2) | ||||
| x1 = [True, True, False, False, True, True, False, False] | x1 = [True, True, False, False, True, True, False, False] | ||||
| x2 = [True, False, False, True, True, False, False, True] | x2 = [True, False, False, True, True, False, False, True] | ||||
| def test_net(): | def test_net(): | ||||
| logical_and = Net() | logical_and = Net() | ||||
| output = logical_and(Tensor(x1), Tensor(x2)) | output = logical_and(Tensor(x1), Tensor(x2)) | ||||
| print(x1) | print(x1) | ||||
| print(x2) | print(x2) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,11 +31,12 @@ class Net(nn.Cell): | |||||
| def construct(self, x1): | def construct(self, x1): | ||||
| return self.logical_not(x1) | return self.logical_not(x1) | ||||
| x1 = [True, True, False, False, True, True, False, False] | x1 = [True, True, False, False, True, True, False, False] | ||||
| def test_net(): | def test_net(): | ||||
| logical_not = Net() | logical_not = Net() | ||||
| output = logical_not(Tensor(x1)) | output = logical_not(Tensor(x1)) | ||||
| print(x1) | print(x1) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,12 +31,14 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.logical_or(x1, x2) | return self.logical_or(x1, x2) | ||||
| x1 = [True, True, False, False, True, True, False, False] | x1 = [True, True, False, False, True, True, False, False] | ||||
| x2 = [True, False, False, True, True, False, False, True] | x2 = [True, False, False, True, True, False, False, True] | ||||
| def test_net(): | def test_net(): | ||||
| logical_or = Net() | logical_or = Net() | ||||
| output = logical_or(Tensor(x1), Tensor(x2)) | output = logical_or(Tensor(x1), Tensor(x2)) | ||||
| print(x1) | print(x1) | ||||
| print(x2) | print(x2) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -21,6 +21,7 @@ import mindspore.context as context | |||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -30,8 +31,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.matmul(x1, x2) | return self.matmul(x1, x2) | ||||
| x1 = np.random.randn(1,3).astype(np.float32) | |||||
| x2 = np.random.randn(3,4).astype(np.float32) | |||||
| x1 = np.random.randn(1, 3).astype(np.float32) | |||||
| x2 = np.random.randn(3, 4).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| @@ -20,7 +20,10 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -30,8 +33,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.matmul(x1, x2) | return self.matmul(x1, x2) | ||||
| x1 = np.random.randn(10,1).astype(np.float32) | |||||
| x2 = np.random.randn(100,1).astype(np.float32) | |||||
| x1 = np.random.randn(10, 1).astype(np.float32) | |||||
| x2 = np.random.randn(100, 1).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| matmul = Net() | matmul = Net() | ||||
| @@ -22,14 +22,16 @@ from mindspore.ops import operations as P | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Max(nn.Cell): | class Max(nn.Cell): | ||||
| def __init__(self,dtype): | |||||
| def __init__(self, dtype): | |||||
| super(Max, self).__init__() | super(Max, self).__init__() | ||||
| self.max = P.Maximum() | self.max = P.Maximum() | ||||
| def construct(self, inputa, inputb): | def construct(self, inputa, inputb): | ||||
| return self.max(inputa, inputb) | return self.max(inputa, inputb) | ||||
| def me_max(inputa, inputb, dtype=ms.float32): | def me_max(inputa, inputb, dtype=ms.float32): | ||||
| context.set_context(mode=context.GRAPH_MODE) | context.set_context(mode=context.GRAPH_MODE) | ||||
| net = Max(dtype) | net = Max(dtype) | ||||
| @@ -44,14 +46,16 @@ def me_max(inputa, inputb, dtype=ms.float32): | |||||
| print(out) | print(out) | ||||
| return out.asnumpy() | return out.asnumpy() | ||||
| def cmp_max(a,b): | |||||
| def cmp_max(a, b): | |||||
| out = np.maximum(a, b) | out = np.maximum(a, b) | ||||
| out_ms = me_max(a, b) | out_ms = me_max(a, b) | ||||
| print("-------ms------") | print("-------ms------") | ||||
| print("numpy out :{}".format(out)) | print("numpy out :{}".format(out)) | ||||
| print("ms out :{}".format(out_ms)) | print("ms out :{}".format(out_ms)) | ||||
| def test_maximum_2_2(): | def test_maximum_2_2(): | ||||
| a = np.random.randn(2, 2).astype(np.float32) | a = np.random.randn(2, 2).astype(np.float32) | ||||
| b = np.random.randn(2, 2).astype(np.float32) | b = np.random.randn(2, 2).astype(np.float32) | ||||
| cmp_max(a,b) | |||||
| cmp_max(a, b) | |||||
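cmp_max computes both the NumPy and the MindSpore result but only prints them; a tolerance-based assertion, sketched here as a hypothetical tightening, would let the test fail on a mismatch:

# inside cmp_max, after computing out and out_ms:
# assert np.allclose(out_ms, out, rtol=1e-5, atol=1e-5)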
| @@ -22,6 +22,7 @@ from mindspore.ops import operations as P | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| grad = C.GradOperation('get_all', get_all=True, sens_param=True) | grad = C.GradOperation('get_all', get_all=True, sens_param=True) | ||||
| class MaxNetMe(Cell): | class MaxNetMe(Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(MaxNetMe, self).__init__() | super(MaxNetMe, self).__init__() | ||||
| @@ -31,6 +32,7 @@ class MaxNetMe(Cell): | |||||
| x = self.max(inputA, inputB) | x = self.max(inputA, inputB) | ||||
| return x | return x | ||||
| class GradWrap(Cell): | class GradWrap(Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(GradWrap, self).__init__() | super(GradWrap, self).__init__() | ||||
| @@ -40,6 +42,7 @@ class GradWrap(Cell): | |||||
| gout = grad(self.network)(inputA, inputB, sens) | gout = grad(self.network)(inputA, inputB, sens) | ||||
| return gout | return gout | ||||
| def gen_data(inputA_np, inputB_np, grad=None): | def gen_data(inputA_np, inputB_np, grad=None): | ||||
| inputA_me = inputA_np | inputA_me = inputA_np | ||||
| if isinstance(inputA_np, np.ndarray) == True: |||||
| if isinstance(inputA_np, np.ndarray): |||||
| @@ -61,7 +64,8 @@ def gen_data(inputA_np, inputB_np, grad=None): | |||||
| print(output[0].asnumpy()) | print(output[0].asnumpy()) | ||||
| print(output[1].asnumpy()) | print(output[1].asnumpy()) | ||||
| def test_net(): | def test_net(): | ||||
| inputA_np = np.random.randn(1, 3, 2, 2).astype(np.float32) | inputA_np = np.random.randn(1, 3, 2, 2).astype(np.float32) | ||||
| inputB_np = np.random.randn(1, 3, 2, 2).astype(np.float32) | inputB_np = np.random.randn(1, 3, 2, 2).astype(np.float32) | ||||
| gen_data(inputA_np, inputB_np) | |||||
| gen_data(inputA_np, inputB_np) | |||||
| @@ -19,12 +19,12 @@ from mindspore.common.api import ms_function | |||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.maxpool = P.MaxPool(padding="SAME", ksize=3, strides=2) | self.maxpool = P.MaxPool(padding="SAME", ksize=3, strides=2) | ||||
| @ms_function | @ms_function | ||||
| def construct(self, x): | def construct(self, x): | ||||
| output = self.maxpool(x) | output = self.maxpool(x) | ||||
| @@ -32,7 +32,7 @@ class Net(nn.Cell): | |||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(32,64,112,112).astype(np.float16) | |||||
| x = np.random.randn(32, 64, 112, 112).astype(np.float16) | |||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| maxpool = Net() | maxpool = Net() | ||||
| output = maxpool(Tensor(x)) | output = maxpool(Tensor(x)) | ||||
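With padding="SAME" the spatial output size depends only on the stride (out = ceil(in / stride)), so this MaxPool call maps (32, 64, 112, 112) to (32, 64, 56, 56). A quick check of that arithmetic:

import math
h_out = math.ceil(112 / 2)  # 56, independent of ksize under SAME padding
# expected output shape: (32, 64, 56, 56)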
| @@ -19,6 +19,7 @@ from mindspore.common.api import ms_function | |||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| @@ -22,7 +22,10 @@ from mindspore.common.initializer import initializer | |||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| import mindspore as ms | import mindspore as ms | ||||
| from mindspore.train.model import Model | from mindspore.train.model import Model | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Min(nn.Cell): | class Min(nn.Cell): | ||||
| def __init__(self, dtype): | def __init__(self, dtype): | ||||
| super(Min, self).__init__() | super(Min, self).__init__() | ||||
| @@ -46,7 +49,8 @@ def me_min(inputa, inputb, dtype=ms.float32): | |||||
| print(out) | print(out) | ||||
| return out.asnumpy() | return out.asnumpy() | ||||
| def cmp_min(a,b): | |||||
| def cmp_min(a, b): | |||||
| print(a) | print(a) | ||||
| print(b) | print(b) | ||||
| @@ -55,8 +59,8 @@ def cmp_min(a,b): | |||||
| out_me = me_min(a, b) | out_me = me_min(a, b) | ||||
| print(out_me) | print(out_me) | ||||
| def test_minimum_2_2(): | def test_minimum_2_2(): | ||||
| a = np.random.randn(2, 2, 1, 1).astype(np.float32) | a = np.random.randn(2, 2, 1, 1).astype(np.float32) | ||||
| b = np.random.randn(2, 2, 1, 1).astype(np.float32) | b = np.random.randn(2, 2, 1, 1).astype(np.float32) | ||||
| cmp_min(a,b) | |||||
| cmp_min(a, b) | |||||
| @@ -22,6 +22,8 @@ from mindspore.ops.operations import Minimum | |||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| grad = C.GradOperation('get_all', get_all=True, sens_param=True) | grad = C.GradOperation('get_all', get_all=True, sens_param=True) | ||||
| class MinNetMe(Cell): | class MinNetMe(Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(MinNetMe, self).__init__() | super(MinNetMe, self).__init__() | ||||
| @@ -41,6 +43,7 @@ class GradWrap(Cell): | |||||
| gout = grad(self.network)(inputA, inputB, sens) | gout = grad(self.network)(inputA, inputB, sens) | ||||
| return gout | return gout | ||||
| def gen_data(inputA_np, inputB_np, grad=None): | def gen_data(inputA_np, inputB_np, grad=None): | ||||
| inputA_me = inputA_np | inputA_me = inputA_np | ||||
| if isinstance(inputA_np, np.ndarray) == True: |||||
| if isinstance(inputA_np, np.ndarray): |||||
| @@ -51,7 +54,7 @@ def gen_data(inputA_np, inputB_np, grad=None): | |||||
| inputB_me = Tensor(inputB_np) | inputB_me = Tensor(inputB_np) | ||||
| if grad is None: | if grad is None: | ||||
| grad = np.random.randn(1, 3, 2, 2).astype(np.float32) | |||||
| grad = np.random.randn(1, 3, 2, 2).astype(np.float32) | |||||
| print(inputA_np) | print(inputA_np) | ||||
| print(inputB_np) | print(inputB_np) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,8 +31,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.mul(x1, x2) | return self.mul(x1, x2) | ||||
| x1 = np.random.randn(3,4).astype(np.float32) | |||||
| x2 = np.random.randn(3,4).astype(np.float32) | |||||
| x1 = np.random.randn(3, 4).astype(np.float32) | |||||
| x2 = np.random.randn(3, 4).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| mul = Net() | mul = Net() | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,8 +31,8 @@ class Net(nn.Cell): | |||||
| def construct(self): | def construct(self): | ||||
| return self.npu_alloc_float_status() | return self.npu_alloc_float_status() | ||||
| def test_net(): | def test_net(): | ||||
| npu_alloc_float_status = Net() | npu_alloc_float_status = Net() | ||||
| output = npu_alloc_float_status() | output = npu_alloc_float_status() | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,11 +31,12 @@ class Net(nn.Cell): | |||||
| def construct(self, x1): | def construct(self, x1): | ||||
| return self.npu_clear_float_status(x1) | return self.npu_clear_float_status(x1) | ||||
| x1 = np.random.randn(8).astype(np.float32) | x1 = np.random.randn(8).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| npu_clear_float_status = Net() | npu_clear_float_status = Net() | ||||
| output = npu_clear_float_status(Tensor(x1)) | output = npu_clear_float_status(Tensor(x1)) | ||||
| print(x1) | print(x1) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,11 +31,12 @@ class Net(nn.Cell): | |||||
| def construct(self, x1): | def construct(self, x1): | ||||
| return self.npu_get_float_status(x1) | return self.npu_get_float_status(x1) | ||||
| x1 = np.random.randn(8).astype(np.float32) | x1 = np.random.randn(8).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| npu_get_float_status = Net() | npu_get_float_status = Net() | ||||
| output = npu_get_float_status(Tensor(x1)) | output = npu_get_float_status(Tensor(x1)) | ||||
| print(x1) | print(x1) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
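NPUAllocFloatStatus, NPUClearFloatStatus, and NPUGetFloatStatus, covered by the three tests above, share an 8-element float32 status buffer used for overflow detection on Ascend. A sketch of the usual sequence, inferred from these tests rather than spelled out in them:

# status = P.NPUAllocFloatStatus()()        # allocate the 8-element buffer
# status = P.NPUClearFloatStatus()(status)  # reset the flags before computing
# ... run the computation being monitored ...
# status = P.NPUGetFloatStatus()(status)    # overflow flags land in the buffer
# overflow = status.asnumpy().sum() != 0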
| @@ -18,21 +18,24 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.pad = P.Pad(paddings=((3,2), (2,3))) | |||||
| self.pad = P.Pad(paddings=((3, 2), (2, 3))) | |||||
| @ms_function | @ms_function | ||||
| def construct(self, x): | def construct(self, x): | ||||
| x = self.pad(x) | x = self.pad(x) | ||||
| return x | return x | ||||
| x = np.random.random(size=(2, 2)).astype(np.float32) | x = np.random.random(size=(2, 2)).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| pad = Net() | pad = Net() | ||||
| output = pad(Tensor(x)) | output = pad(Tensor(x)) | ||||
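P.Pad with paddings=((3, 2), (2, 3)) zero-fills 3 rows above, 2 below, 2 columns left and 3 right, so the (2, 2) input becomes (7, 7). A NumPy reference the test could assert against, as a sketch:

import numpy as np
x = np.random.random(size=(2, 2)).astype(np.float32)
ref = np.pad(x, ((3, 2), (2, 3)), mode='constant')  # zero fill -> shape (7, 7)
# assert (output.asnumpy() == ref).all()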
| @@ -23,8 +23,10 @@ from mindspore.common.initializer import initializer | |||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| import mindspore as ms | import mindspore as ms | ||||
| from mindspore.train.model import Model | from mindspore.train.model import Model | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class PowMe(Cell): | class PowMe(Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(PowMe, self).__init__() | super(PowMe, self).__init__() | ||||
| @@ -33,6 +35,7 @@ class PowMe(Cell): | |||||
| def construct(self, input, exp): | def construct(self, input, exp): | ||||
| return self.pow(input, exp) | return self.pow(input, exp) | ||||
| def pow_forward_me_impl(input, exp): | def pow_forward_me_impl(input, exp): | ||||
| n = PowMe() | n = PowMe() | ||||
| n.set_train() | n.set_train() | ||||
| @@ -40,6 +43,7 @@ def pow_forward_me_impl(input, exp): | |||||
| out = m.predict(input, exp) | out = m.predict(input, exp) | ||||
| return out.asnumpy() | return out.asnumpy() | ||||
| def pow_forward_cmp(input_shape, exp_shape): | def pow_forward_cmp(input_shape, exp_shape): | ||||
| if len(input_shape) == 0: | if len(input_shape) == 0: | ||||
| input_np = np.absolute(np.random.randn()) | input_np = np.absolute(np.random.randn()) | ||||
| @@ -54,14 +58,14 @@ def pow_forward_cmp(input_shape, exp_shape): | |||||
| exp_np = np.absolute(np.random.randn(*exp_shape).astype(np.float32)) | exp_np = np.absolute(np.random.randn(*exp_shape).astype(np.float32)) | ||||
| exp_tf = exp_np | exp_tf = exp_np | ||||
| exp_me = Tensor(exp_np, dtype=ms.float32) | exp_me = Tensor(exp_np, dtype=ms.float32) | ||||
| out_me = pow_forward_me_impl(input_me, exp_me) | out_me = pow_forward_me_impl(input_me, exp_me) | ||||
| print(input_me) | print(input_me) | ||||
| print(exp_me) | print(exp_me) | ||||
| print(out_me) | print(out_me) | ||||
| def test_pow_input_scalar_exp_scalar(): | def test_pow_input_scalar_exp_scalar(): | ||||
| input_shape = [] | input_shape = [] | ||||
| exp_shape = [] | exp_shape = [] | ||||
| pow_forward_cmp(input_shape, exp_shape) | pow_forward_cmp(input_shape, exp_shape) | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,8 +31,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x1, x2): | def construct(self, x1, x2): | ||||
| return self.realdiv(x1, x2) | return self.realdiv(x1, x2) | ||||
| x1 = np.random.randn(3,4).astype(np.float32) | |||||
| x2 = np.random.randn(3,4).astype(np.float32) | |||||
| x1 = np.random.randn(3, 4).astype(np.float32) | |||||
| x2 = np.random.randn(3, 4).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| realdiv = Net() | realdiv = Net() | ||||
| @@ -18,7 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -28,11 +31,12 @@ class Net(nn.Cell): | |||||
| def construct(self, x1): | def construct(self, x1): | ||||
| return self.reciprocal(x1) | return self.reciprocal(x1) | ||||
| x1 = np.random.randn(3, 4).astype(np.float32) | x1 = np.random.randn(3, 4).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| reciprocal = Net() | reciprocal = Net() | ||||
| output = reciprocal(Tensor(x1)) | output = reciprocal(Tensor(x1)) | ||||
| print(x1) | print(x1) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -20,7 +20,10 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -30,8 +33,9 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.relu(x) | return self.relu(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| x = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| relu = Net() | relu = Net() | ||||
| output = relu(Tensor(x)) | output = relu(Tensor(x)) | ||||
| print(x) | print(x) | ||||
| @@ -21,8 +21,10 @@ import mindspore.context as context | |||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Grad(nn.Cell): | class Grad(nn.Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -33,6 +35,7 @@ class Grad(nn.Cell): | |||||
| def construct(self, input, output_grad): | def construct(self, input, output_grad): | ||||
| return self.grad(self.network)(input, output_grad) | return self.grad(self.network)(input, output_grad) | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -41,9 +44,10 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.relu(x) | return self.relu(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| sens = np.random.randn(2,3,3,4).astype(np.float32) | |||||
| x = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| sens = np.random.randn(2, 3, 3, 4).astype(np.float32) | |||||
| net = Grad(Net()) | net = Grad(Net()) | ||||
| output = net(Tensor(x), Tensor(sens)) | output = net(Tensor(x), Tensor(sens)) | ||||
| print(len(output)) | print(len(output)) | ||||
| @@ -21,8 +21,10 @@ import mindspore.context as context | |||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Grad(nn.Cell): | class Grad(nn.Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -33,6 +35,7 @@ class Grad(nn.Cell): | |||||
| def construct(self, input): | def construct(self, input): | ||||
| return self.grad(self.network)(input) | return self.grad(self.network)(input) | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -41,8 +44,9 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.relu_v2(x) | return self.relu_v2(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = Tensor(np.ones((2,3,3,4)).astype(np.float32)) | |||||
| x = Tensor(np.ones((2, 3, 3, 4)).astype(np.float32)) | |||||
| relu_net = Net() | relu_net = Net() | ||||
| relu_output = relu_net(x) | relu_output = relu_net(x) | ||||
| net = Grad(Net()) | net = Grad(Net()) | ||||
| @@ -18,8 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -29,6 +31,7 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.upsample(x) | return self.upsample(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.random(size=(32, 3, 32, 32)).astype(np.float32) | x = np.random.random(size=(32, 3, 32, 32)).astype(np.float32) | ||||
| upsample = Net() | upsample = Net() | ||||
| @@ -19,6 +19,7 @@ from mindspore.ops.composite import GradOperation | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| @@ -49,4 +50,4 @@ def test_net(): | |||||
| grad = Grad(Net()) | grad = Grad(Net()) | ||||
| output = grad(Tensor(image), Tensor(grads)) | output = grad(Tensor(image), Tensor(grads)) | ||||
| print("=================output====================") | print("=================output====================") | ||||
| print(output) | |||||
| print(output) | |||||
| @@ -20,6 +20,7 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| @@ -29,12 +30,13 @@ class Net(nn.Cell): | |||||
| self.scatternd = P.ScatterNd() | self.scatternd = P.ScatterNd() | ||||
| def construct(self, indices, update): | def construct(self, indices, update): | ||||
| return self.scatternd(indices, update, (3,3)) | |||||
| return self.scatternd(indices, update, (3, 3)) | |||||
| indices = np.array([[0, 1], [1, 1]]).astype(np.int32) | indices = np.array([[0, 1], [1, 1]]).astype(np.int32) | ||||
| update = np.array([3.2, 1.1]).astype(np.float32) | update = np.array([3.2, 1.1]).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| scatternd = Net() | scatternd = Net() | ||||
| print(indices) | print(indices) | ||||
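ScatterNd scatters update into a zeroed tensor of the given shape at the positions listed in indices, so the expected (3, 3) result here is fully determined. A NumPy sketch:

import numpy as np
expected = np.zeros((3, 3), dtype=np.float32)
expected[0, 1] = 3.2  # indices[0] == [0, 1]
expected[1, 1] = 1.1  # indices[1] == [1, 1]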
| @@ -23,7 +23,10 @@ from mindspore.common.initializer import initializer | |||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| import mindspore as ms | import mindspore as ms | ||||
| from mindspore.train.model import Model | from mindspore.train.model import Model | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Select(Cell): | class Select(Cell): | ||||
| def __init__(self, dtype): | def __init__(self, dtype): | ||||
| super(Select, self).__init__() | super(Select, self).__init__() | ||||
| @@ -32,6 +35,7 @@ class Select(Cell): | |||||
| def construct(self, cond, inputa, inputb): | def construct(self, cond, inputa, inputb): | ||||
| return self.select(cond, inputa, inputb) | return self.select(cond, inputa, inputb) | ||||
| def me_select(cond, inputa, inputb, dtype=ms.float32): | def me_select(cond, inputa, inputb, dtype=ms.float32): | ||||
| net = Select(dtype) | net = Select(dtype) | ||||
| net.set_train() | net.set_train() | ||||
| @@ -45,9 +49,10 @@ def me_select(cond, inputa, inputb, dtype=ms.float32): | |||||
| out = model.predict(Tensor(cond), inputa, inputb) | out = model.predict(Tensor(cond), inputa, inputb) | ||||
| return out.asnumpy() | return out.asnumpy() | ||||
| def cmp_select(input_cond,inputa,inputb): | |||||
| cond = input_cond > 0.5 | |||||
| def cmp_select(input_cond, inputa, inputb): | |||||
| cond = input_cond > 0.5 | |||||
| out_me = me_select(cond, inputa, inputb) | out_me = me_select(cond, inputa, inputb) | ||||
| print(input_cond) | print(input_cond) | ||||
| print(cond) | print(cond) | ||||
| @@ -55,9 +60,9 @@ def cmp_select(input_cond,inputa,inputb): | |||||
| print(inputb) | print(inputb) | ||||
| print(out_me) | print(out_me) | ||||
| def test_select_2_2(): | def test_select_2_2(): | ||||
| input_cond = np.random.rand(2, 2) | input_cond = np.random.rand(2, 2) | ||||
| inputa = np.random.randn(2,2).astype(np.float32) | |||||
| inputb = np.random.randn(2,2).astype(np.float32) | |||||
| cmp_select(input_cond,inputa,inputb) | |||||
| inputa = np.random.randn(2, 2).astype(np.float32) | |||||
| inputb = np.random.randn(2, 2).astype(np.float32) | |||||
| cmp_select(input_cond, inputa, inputb) | |||||
| @@ -18,8 +18,10 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -29,6 +31,7 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.sigmoid(x) | return self.sigmoid(x) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.random(size=(2, 3)).astype(np.float32) | x = np.random.random(size=(2, 3)).astype(np.float32) | ||||
| sigmoid = Net() | sigmoid = Net() | ||||
| @@ -21,6 +21,7 @@ import mindspore.context as context | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -22,6 +22,7 @@ import mindspore.context as context | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -19,6 +19,7 @@ from mindspore.ops.composite import GradOperation | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| @@ -42,6 +43,7 @@ class Grad(nn.Cell): | |||||
| def construct(self, x, y): | def construct(self, x, y): | ||||
| return self.grad(self.network)(x, y) | return self.grad(self.network)(x, y) | ||||
| def test_net(): | def test_net(): | ||||
| x = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32) | x = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32) | ||||
| y = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32) | y = np.random.random(size=(2, 3, 4, 5, 6)).astype(np.float32) | ||||
| @@ -49,4 +51,3 @@ def test_net(): | |||||
| output = net(Tensor(x), Tensor(y)) | output = net(Tensor(x), Tensor(y)) | ||||
| print("=================output====================") | print("=================output====================") | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| @@ -20,26 +20,28 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Slice(nn.Cell): | class Slice(nn.Cell): | ||||
| def __init__( self): | |||||
| def __init__(self): | |||||
| super(Slice, self).__init__() | super(Slice, self).__init__() | ||||
| self.cat = P.Slice() | self.cat = P.Slice() | ||||
| self.x1 = Parameter(initializer( | self.x1 = Parameter(initializer( | ||||
| Tensor(np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]]).astype(np.float32)), [3,2,3]), name='x1') | |||||
| Tensor(np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]]).astype( | |||||
| np.float32)), [3, 2, 3]), name='x1') | |||||
| @ms_function | @ms_function | ||||
| def construct(self): | def construct(self): | ||||
| return self.cat(self.x1, (0,1, 0), (2, 1, 3)) | |||||
| return self.cat(self.x1, (0, 1, 0), (2, 1, 3)) | |||||
| def test_slice(): | def test_slice(): | ||||
| cat = Slice() | cat = Slice() | ||||
| output = cat() | output = cat() | ||||
| expect = [[[2., -2., 2.]], | |||||
| [[4., -4., 4.]]] | |||||
| expect = [[[2., -2., 2.]], | |||||
| [[4., -4., 4.]]] | |||||
| print(output) | print(output) | ||||
| assert (output.asnumpy() == expect).all() | |||||
| assert (output.asnumpy() == expect).all() | |||||
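P.Slice takes a begin and a size per axis, so this call is equivalent to the NumPy slice x1[0:2, 1:2, 0:3], which reproduces the expect literal above:

import numpy as np
x1 = np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]], np.float32)
ref = x1[0:2, 1:2, 0:3]  # begin=(0, 1, 0), size=(2, 1, 3)
# ref == [[[2., -2., 2.]], [[4., -4., 4.]]]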
| @@ -18,6 +18,7 @@ import mindspore.nn as nn | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore import Tensor | from mindspore import Tensor | ||||
| from mindspore.ops import operations as P | from mindspore.ops import operations as P | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| @@ -31,6 +31,7 @@ class Net(nn.Cell): | |||||
| def construct(self, pred, gt): | def construct(self, pred, gt): | ||||
| return self.SmoothL1Loss(pred, gt) | return self.SmoothL1Loss(pred, gt) | ||||
| class Grad(nn.Cell): | class Grad(nn.Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -20,17 +20,22 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.Softmax = P.Softmax() | self.Softmax = P.Softmax() | ||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.Softmax(x) | return self.Softmax(x) | ||||
| x = np.array([[5, 1]]).astype(np.float32) | x = np.array([[5, 1]]).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| softmax = Net() | softmax = Net() | ||||
| output = softmax(Tensor(x)) | output = softmax(Tensor(x)) | ||||
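For the fixed input [[5, 1]] the softmax output is deterministic: exp(5)/(exp(5)+exp(1)) ≈ 0.982 and exp(1)/(exp(5)+exp(1)) ≈ 0.018. A NumPy reference the test could assert against, as a sketch:

import numpy as np
x = np.array([[5, 1]]).astype(np.float32)
e = np.exp(x - x.max())                  # subtract the max for numerical stability
ref = e / e.sum(axis=-1, keepdims=True)  # ~[[0.982, 0.018]]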
| @@ -18,6 +18,7 @@ import mindspore.nn as nn | |||||
| from mindspore.common.api import ms_function | from mindspore.common.api import ms_function | ||||
| import numpy as np | import numpy as np | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| @@ -36,4 +37,4 @@ def test_net(): | |||||
| labels = np.random.randn(32, 1001).astype(np.float16) | labels = np.random.randn(32, 1001).astype(np.float16) | ||||
| SoftmaxCrossEntropyWithLogits = Net() | SoftmaxCrossEntropyWithLogits = Net() | ||||
| output = SoftmaxCrossEntropyWithLogits(Tensor(features), Tensor(labels)) | output = SoftmaxCrossEntropyWithLogits(Tensor(features), Tensor(labels)) | ||||
| #print(output.asnumpy()) | |||||
| # print(output.asnumpy()) | |||||
| @@ -20,7 +20,10 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -29,7 +32,8 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.split(x) | return self.split(x) | ||||
| x = np.random.randn(2,4).astype(np.float32) | |||||
| x = np.random.randn(2, 4).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| @@ -20,17 +20,22 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.sqrt = P.Sqrt() | self.sqrt = P.Sqrt() | ||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.sqrt(x) | return self.sqrt(x) | ||||
| x = np.array([1.0, 4.0, 9.0]).astype(np.float32) | x = np.array([1.0, 4.0, 9.0]).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| sqrt = Net() | sqrt = Net() | ||||
| output = sqrt(Tensor(x)) | output = sqrt(Tensor(x)) | ||||
| @@ -20,17 +20,22 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.square = P.Square() | self.square = P.Square() | ||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.square(x) | return self.square(x) | ||||
| x = np.array([1.0, 4.0, 9.0]).astype(np.float32) | x = np.array([1.0, 4.0, 9.0]).astype(np.float32) | ||||
| def test_net(): | def test_net(): | ||||
| square = Net() | square = Net() | ||||
| output = square(Tensor(x)) | output = square(Tensor(x)) | ||||
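Both element-wise tests above use the fixed input [1.0, 4.0, 9.0], so the expected outputs are closed-form: [1., 2., 3.] for Sqrt and [1., 16., 81.] for Square. Hypothetical assertions:

import numpy as np
x = np.array([1.0, 4.0, 9.0]).astype(np.float32)
# assert np.allclose(sqrt_output.asnumpy(), np.sqrt(x))      # [1., 2., 3.]
# assert np.allclose(square_output.asnumpy(), np.square(x))  # [1., 16., 81.]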
| @@ -19,7 +19,10 @@ from mindspore.nn import Cell | |||||
| from mindspore.train.model import Model | from mindspore.train.model import Model | ||||
| import pytest | import pytest | ||||
| import mindspore.context as context | import mindspore.context as context | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(Cell): | class Net(Cell): | ||||
| def __init__(self, begin, end, stride): | def __init__(self, begin, end, stride): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -32,6 +35,7 @@ class Net(Cell): | |||||
| x = self.stridedslice(input, self.begin, self.end, self.stride) | x = self.stridedslice(input, self.begin, self.end, self.stride) | ||||
| return x | return x | ||||
| def me_stridedslice(input1, begin, end, stride): | def me_stridedslice(input1, begin, end, stride): | ||||
| input_me = Tensor(input1) | input_me = Tensor(input1) | ||||
| net = Net(begin, end, stride) | net = Net(begin, end, stride) | ||||
| @@ -40,17 +44,19 @@ def me_stridedslice(input1, begin, end, stride): | |||||
| output = model.predict(input_me) | output = model.predict(input_me) | ||||
| print(output.asnumpy()) | print(output.asnumpy()) | ||||
| def test_stridedslice_input_2d(): | def test_stridedslice_input_2d(): | ||||
| input = np.random.randn(5, 5).astype(np.int32) | input = np.random.randn(5, 5).astype(np.int32) | ||||
| begin = (0,0) | |||||
| end = (2,2) | |||||
| stride = (1,1) | |||||
| begin = (0, 0) | |||||
| end = (2, 2) | |||||
| stride = (1, 1) | |||||
| me_stridedslice(input, begin, end, stride) | me_stridedslice(input, begin, end, stride) | ||||
| def test_stridedslice_input_3d(): | def test_stridedslice_input_3d(): | ||||
| input = np.random.randn(5, 5, 5).astype(np.float32) | input = np.random.randn(5, 5, 5).astype(np.float32) | ||||
| begin = (0,0,0) | |||||
| end = (3,3,3) | |||||
| stride = (1,1,1) | |||||
| begin = (0, 0, 0) | |||||
| end = (3, 3, 3) | |||||
| stride = (1, 1, 1) | |||||
| me_stridedslice(input, begin, end, stride) | me_stridedslice(input, begin, end, stride) | ||||
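With begin, end, and stride given per axis, the two StridedSlice cases above reduce to plain NumPy basic slicing (a reference sketch):

# 2d case: input[0:2:1, 0:2:1]        -> the top-left 2x2 block
# 3d case: input[0:3:1, 0:3:1, 0:3:1] -> the leading 3x3x3 block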
| @@ -19,8 +19,10 @@ from mindspore.nn import Cell | |||||
| from mindspore.ops.composite import GradOperation | from mindspore.ops.composite import GradOperation | ||||
| from mindspore import context | from mindspore import context | ||||
| import pytest | import pytest | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Grad(Cell): | class Grad(Cell): | ||||
| def __init__(self, network): | def __init__(self, network): | ||||
| super(Grad, self).__init__() | super(Grad, self).__init__() | ||||
| @@ -31,6 +33,7 @@ class Grad(Cell): | |||||
| gout = self.grad(self.network)(input, output_grad) | gout = self.grad(self.network)(input, output_grad) | ||||
| return gout | return gout | ||||
| class Net(Cell): | class Net(Cell): | ||||
| def __init__(self, begin, end, stride): | def __init__(self, begin, end, stride): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -43,6 +46,7 @@ class Net(Cell): | |||||
| x = self.stridedslice(input, self.begin, self.end, self.stride) | x = self.stridedslice(input, self.begin, self.end, self.stride) | ||||
| return x | return x | ||||
| def me_stridedslice(input, begin, end, stride, gradients): | def me_stridedslice(input, begin, end, stride, gradients): | ||||
| input_me = Tensor(input) | input_me = Tensor(input) | ||||
| out_grad_me = Tensor(gradients) | out_grad_me = Tensor(gradients) | ||||
| @@ -51,6 +55,7 @@ def me_stridedslice(input, begin, end, stride, gradients): | |||||
| out_grad = net_me(input_me, out_grad_me) | out_grad = net_me(input_me, out_grad_me) | ||||
| print(out_grad.asnumpy()) | print(out_grad.asnumpy()) | ||||
| def test_grad_stridedslice_1d(): | def test_grad_stridedslice_1d(): | ||||
| input = np.random.randn(2).astype(np.float32) | input = np.random.randn(2).astype(np.float32) | ||||
| begin = (0,) | begin = (0,) | ||||
| @@ -20,17 +20,21 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| self.sub = P.Sub() | self.sub = P.Sub() | ||||
| def construct(self, x, y): | def construct(self, x, y): | ||||
| return self.sub(x, y) | return self.sub(x, y) | ||||
| x = np.random.randn(1,3,3,4).astype(np.float32) | |||||
| y = np.random.randn(1,3,3,4).astype(np.float32) | |||||
| x = np.random.randn(1, 3, 3, 4).astype(np.float32) | |||||
| y = np.random.randn(1, 3, 3, 4).astype(np.float32) | |||||
| def test_net(): | def test_net(): | ||||
| @@ -21,6 +21,7 @@ from mindspore.ops import operations as P | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -29,9 +30,12 @@ class Net(nn.Cell): | |||||
| def construct(self, x): | def construct(self, x): | ||||
| return self.tanh(x) | return self.tanh(x) | ||||
| input_shape = [1] | input_shape = [1] | ||||
| input_np = np.random.randn(*input_shape).astype(np.float32) | input_np = np.random.randn(*input_shape).astype(np.float32) | ||||
| input_me = Tensor(input_np) | input_me = Tensor(input_np) | ||||
| def test_net(): | def test_net(): | ||||
| context.set_context(mode=context.GRAPH_MODE) | context.set_context(mode=context.GRAPH_MODE) | ||||
| tanh = Net() | tanh = Net() | ||||
| @@ -40,4 +44,4 @@ def test_net(): | |||||
| out = m.predict(input_me) | out = m.predict(input_me) | ||||
| print("out_me.dtype={}".format(out.dtype)) | print("out_me.dtype={}".format(out.dtype)) | ||||
| print("out_me.asnumpy={}".format(out.asnumpy())) | print("out_me.asnumpy={}".format(out.asnumpy())) | ||||
| return out.asnumpy() | |||||
| return out.asnumpy() | |||||
| @@ -22,6 +22,7 @@ from mindspore.ops.operations import _grad_ops as G | |||||
| context.set_context(device_target="Ascend") | context.set_context(device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self): | def __init__(self): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -30,9 +31,12 @@ class Net(nn.Cell): | |||||
| def construct(self, y, dy): | def construct(self, y, dy): | ||||
| return self.tanh_grad(y, dy) | return self.tanh_grad(y, dy) | ||||
| input_shape = [1] | input_shape = [1] | ||||
| input_np = np.random.randn(*input_shape).astype(np.float32) | input_np = np.random.randn(*input_shape).astype(np.float32) | ||||
| input_me = Tensor(input_np) | input_me = Tensor(input_np) | ||||
| def test_net(): | def test_net(): | ||||
| context.set_context(mode=context.GRAPH_MODE) | context.set_context(mode=context.GRAPH_MODE) | ||||
| tanh_grad = Net() | tanh_grad = Net() | ||||
| @@ -41,4 +45,4 @@ def test_net(): | |||||
| out = m.predict(input_me, input_me) | out = m.predict(input_me, input_me) | ||||
| print("out_me.dtype={}".format(out.dtype)) | print("out_me.dtype={}".format(out.dtype)) | ||||
| print("out_me.asnumpy={}".format(out.asnumpy())) | print("out_me.asnumpy={}".format(out.asnumpy())) | ||||
| return out.asnumpy() | |||||
| return out.asnumpy() | |||||
| @@ -20,6 +20,7 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| @@ -20,7 +20,10 @@ import numpy as np | |||||
| import mindspore.context as context | import mindspore.context as context | ||||
| from mindspore.common.initializer import initializer | from mindspore.common.initializer import initializer | ||||
| from mindspore.common.parameter import Parameter | from mindspore.common.parameter import Parameter | ||||
| context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | context.set_context(mode=context.GRAPH_MODE, device_target="Ascend") | ||||
| class Net(nn.Cell): | class Net(nn.Cell): | ||||
| def __init__(self, k): | def __init__(self, k): | ||||
| super(Net, self).__init__() | super(Net, self).__init__() | ||||
| @@ -32,7 +35,7 @@ class Net(nn.Cell): | |||||
| def test_net(): | def test_net(): | ||||
| x = np.random.randn(4,4).astype(np.float16) | |||||
| x = np.random.randn(4, 4).astype(np.float16) | |||||
| k = 2 | k = 2 | ||||
| TopK = Net(k) | TopK = Net(k) | ||||
| output = TopK(Tensor(x)) | output = TopK(Tensor(x)) | ||||
| @@ -41,4 +44,3 @@ def test_net(): | |||||
| print("***********output y*********") | print("***********output y*********") | ||||
| print(output[0].asnumpy()) | print(output[0].asnumpy()) | ||||