From: @chenfei52
Reviewed-by: @ginfung
Signed-off-by: pull/14641/MERGE
@@ -0,0 +1,60 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, device_target="Ascend")
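# Test case: a single while loop whose loop variables are updated in the body;
# the forward result is checked against a precomputed value.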
class ForwardNet(nn.Cell):
    def construct(self, x, y):
        y = y + 10
        while x < y:
            x = (x + 2) * (y - 9)
            y = y + 2
        x = x + 5
        return x


class BackwardNet(nn.Cell):
    def __init__(self, forward_net):
        super(BackwardNet, self).__init__()
        self.forward_net = forward_net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads
def test_forward():
    c1 = Tensor([0], mstype.int32)
    c2 = Tensor([0], mstype.int32)
    expect = Tensor([75], mstype.int32)
    forward_net = ForwardNet()
    output = forward_net(c1, c2)
    assert expect == output
def test_backward():
    c1 = Tensor([0], mstype.int32)
    c2 = Tensor([0], mstype.int32)
    expect = Tensor([75], mstype.int32)
    forward_net = ForwardNet()
    output = forward_net(c1, c2)
    assert expect == output
    # Also build and run the gradient graph; the gradient value is printed rather than asserted.
    backward_net = BackwardNet(forward_net)
    grads = backward_net(c1, c2)
    print("grads:", grads)
@@ -0,0 +1,67 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
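# Test case: an if statement nested inside a while loop; forward output and gradients are printed.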
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,114 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
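# Test case: a while loop containing a break, plus an equivalent rewrite that folds the
# break condition into the while condition.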
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            out = x * y + out
            if out > 20:
                break
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


class ForwardNetReplaceBreak(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNetReplaceBreak, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles and out <= 20:
            out = x * y + out
            i = i + 1
        return out


class BackwardNetReplaceBreak(nn.Cell):
    def __init__(self, net):
        super(BackwardNetReplaceBreak, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads
def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


# Problem: exceeds the function call depth limit of 1000.
def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)


def test_forward_replace_break():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNetReplaceBreak(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


# Problem: exceeds the function call depth limit of 1000.
def test_backward_replace_break():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNetReplaceBreak(max_cycles=10)
    backward_net = BackwardNetReplaceBreak(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,64 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
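# Test case: an if statement nested inside a for loop.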
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        for _ in range(self.max_cycles):
            if out <= 20:
                out = x * y + out
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,65 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
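# Test case: a for loop containing a break.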
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        for _ in range(self.max_cycles):
            out = x * y + out
            if out > 20:
                break
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
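# Test case: a while loop nested inside an if statement.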
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
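# Test case: an if statement that follows a while loop.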
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        if out >= 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
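# Test case: an if statement that follows a while loop which itself contains an if.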
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        if out >= 30:
            out = out - 30
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,70 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
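# Test case: an if statement that follows a while loop nested inside an if.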
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,71 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
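# Test case: an if statement that follows a while loop nested inside another while loop.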
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        while x < y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
            x = x + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
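# Test case: an if statement that follows a for loop nested inside a while loop.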
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        while x < y:
            for _ in range(0, self.max_cycles):
                out = x * y + out
            x = x + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
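# Test case: a while loop that follows an if statement.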
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        if out >= 20:
            out = out - 20
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
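# Test case: a while loop that follows an if statement nested inside another if.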
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        if x < y:
            if out >= 20:
                out = out - 20
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
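# Test case: a while loop that follows another while loop which contains an if.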
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        while i < self.max_cycles:
            out = out + 10
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
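# Test case: a while loop that follows a for loop which contains an if.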
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        for _ in range(0, self.max_cycles):
            if out <= 20:
                out = x * y + out
        while i < self.max_cycles:
            out = out + 10
            # Advance the loop counter so this trailing loop terminates; nothing else updates i.
            i = i + 1
        return out
class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,70 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
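# Test case: a while loop that follows a while loop nested inside an if.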
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        while out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
@@ -0,0 +1,67 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")
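# Test case: a while loop that follows a for loop nested inside an if.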
class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        if x > y:
            for _ in range(0, self.max_cycles):
                out = x * y + out
        while out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)