From: @lanzhineng  Reviewed-by: @ginfung, @zh_qh  Signed-off-by: @zh_qh  (pull/15520/MERGE)
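This merge adds five graph-mode control-flow test files that combine while and for loops in different nestings and orderings (for inside while, for inside for, while followed by for, while with an if branch followed by for, and while followed by a nested while). Each file builds a small ForwardNet, runs it forward, and exercises the backward pass through C.GradOperation in a BackwardNet wrapper.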
@@ -0,0 +1,72 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Control-flow pattern under test: a for loop nested in a while loop,
        # followed by a second, stand-alone while loop.
        out = self.zero
        j = self.i
        while j < self.max_cycles:
            for _ in range(0, self.max_cycles):
                out = x * y + out
            j = j + 1
        i = self.i
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,71 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Control-flow pattern under test: a for loop nested in a for loop,
        # followed by a while loop.
        out = self.zero
        for _ in range(0, self.max_cycles):
            for _ in range(0, self.max_cycles):
                out = x * y + out
        i = self.i
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Control-flow pattern under test: a while loop followed by a for loop.
        out = self.zero
        i = self.i
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        for _ in range(0, self.max_cycles):
            out = x * y + out
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,73 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.param_a = Tensor(np.array(50), mstype.int32)
        self.param_b = Tensor(np.array(1), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Control-flow pattern under test: a while loop containing an if branch,
        # followed by a for loop.
        out = self.zero
        i = self.i
        while i < self.max_cycles:
            if out > self.param_a:
                out = x * y - out
            out = x * y + self.param_b
            i = i + 1
        for _ in range(0, self.max_cycles):
            out = x * y + out
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,74 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Control-flow pattern under test: a while loop followed by a while loop
        # nested inside another while loop.
        out = self.zero
        i = self.i
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        i = self.i
        while i < self.max_cycles:
            j = self.i
            while j < self.max_cycles:
                out = x * y + out
                j = j + 1
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)