From: @chenfei52 Reviewed-by: @zh_qh, @ginfung Signed-off-by: @zh_qh (pull/15462/MERGE)
@@ -43,7 +43,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -54,8 +54,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -63,5 +68,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
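Each of the per-file hunks below repeats the same two changes shown above: `BackwardNet` now builds its gradient functional with `C.GradOperation(get_all=True)`, so it returns a gradient for every input rather than only the first, and `test_forward`/`test_backward` drop the `print` checks in favour of running the same network once under `context.GRAPH_MODE` and once under `context.PYNATIVE_MODE` and asserting that the two results agree. A minimal, self-contained sketch of the resulting pattern, assuming a placeholder `ForwardNet` body (each test file defines its own while/for control-flow variant):

```python
import numpy as np

from mindspore import Tensor, context, nn
from mindspore.common import dtype as mstype
from mindspore.ops import composite as C


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        # Placeholder control-flow body; each test file uses its own
        # combination of while/for loops here.
        out = self.zero
        for _ in range(0, self.max_cycles):
            out = x * y + out
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        # get_all=True: return a gradient for every input, not just the first.
        self.grad = C.GradOperation(get_all=True)

    def construct(self, *inputs):
        return self.grad(self.forward_net)(*inputs)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    backward_net = BackwardNet(ForwardNet(max_cycles=10))
    # Same net, both execution modes; the gradients must agree.
    context.set_context(mode=context.GRAPH_MODE)
    graph_mode_grads = backward_net(x, y)
    context.set_context(mode=context.PYNATIVE_MODE)
    pynative_mode_grads = backward_net(x, y)
    assert graph_mode_grads == pynative_mode_grads
```

Comparing the graph-mode and PyNative-mode results directly inside the test replaces manual inspection of printed values and catches mode-specific control-flow bugs automatically.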
@@ -40,7 +40,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -51,8 +51,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -60,5 +65,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -44,7 +44,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -55,8 +55,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -64,5 +69,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -44,7 +44,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -55,8 +55,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -64,5 +69,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -45,7 +45,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -56,8 +56,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -65,5 +70,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -46,7 +46,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -57,8 +57,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -66,5 +71,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -47,7 +47,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -58,8 +58,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -67,5 +72,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -31,10 +31,10 @@ class ForwardNet(nn.Cell):
def construct(self, x, y):
out = self.zero
while x < y:
for _ in range(0, self.max_cycles):
for _ in range(0, self.max_cycles):
while x < y:
out = x * y + out
x = x + 1
x = x + 1
if out > 20:
out = out - 20
return out
@@ -44,7 +44,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -55,8 +55,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -64,5 +69,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
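The first hunk of the file above is the only one that touches `ForwardNet` itself: the loop nesting is inverted, so the bounded `for _ in range(0, self.max_cycles)` loop becomes the outer loop and the data-dependent `while x < y` loop moves inside it. A hedged reconstruction of the new `construct` from the flattened hunk follows; only the new loop order is certain, and the indentation of the trailing `if out > 20` block is not recoverable from this extract, so placing it outside the `for` loop is an assumption.

```python
# Hedged reconstruction of the swapped loop nesting in the hunk above; only
# the new loop order is certain, the placement of the `if` block is assumed.
def construct(self, x, y):
    out = self.zero
    for _ in range(0, self.max_cycles):   # now the outer, bounded loop
        while x < y:                      # now the inner, data-dependent loop
            out = x * y + out
            x = x + 1
    if out > 20:                          # assumed to sit after the for loop
        out = out - 20
    return out
```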
@@ -44,7 +44,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -55,8 +55,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -64,5 +69,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -45,7 +45,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -56,8 +56,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -65,5 +70,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -45,7 +45,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -56,8 +56,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -65,5 +70,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -37,6 +37,7 @@ class ForwardNet(nn.Cell):
out = x * y + out
while i < self.max_cycles:
out = out + 10
i = i + 1
return out
@@ -44,7 +45,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -52,17 +53,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
out = forward_net(x, y)
print("forward out:", out)
x = Tensor(np.array(3), mstype.int32)
y = Tensor(np.array(5), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
x = Tensor(np.array(3), mstype.int32)
y = Tensor(np.array(5), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
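Beyond the common `GradOperation`/dual-mode changes, this file gets two fixes: the second `while` loop in `construct` gains an `i = i + 1` increment, without which `i < self.max_cycles` never becomes false, and both tests switch to smaller inputs (`x = 3`, `y = 5`, `max_cycles=3`) so they finish quickly. Below is a standalone illustration of why the increment matters, using a hypothetical `CountingNet` rather than the file's exact network; the `self.i`/`self.zero` counters are assumptions modelled on the usual setup in these tests.

```python
# Illustration only: a counter-driven while loop in construct must advance
# its counter, otherwise the loop condition can never become false.
import numpy as np

from mindspore import Tensor, context, nn
from mindspore.common import dtype as mstype


class CountingNet(nn.Cell):  # hypothetical name, not from the diff
    def __init__(self, max_cycles=3):
        super(CountingNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = x * y + self.zero
        while i < self.max_cycles:
            out = out + 10
            i = i + 1  # the fix: the loop terminates after max_cycles steps
        return out


context.set_context(mode=context.GRAPH_MODE)
out = CountingNet(max_cycles=3)(Tensor(np.array(3), mstype.int32),
                                Tensor(np.array(5), mstype.int32))
print(out)  # 3 * 5 + 3 * 10 = 45
```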
@@ -46,7 +46,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -57,8 +57,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -66,5 +71,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
@@ -43,7 +43,7 @@ class BackwardNet(nn.Cell):
def __init__(self, net):
super(BackwardNet, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation()
self.grad = C.GradOperation(get_all=True)
def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
@@ -54,8 +54,13 @@ def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
out = forward_net(x, y)
print("forward out:", out)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
assert graph_mode_out == pynative_mode_out
def test_backward():
@@ -63,5 +68,10 @@ def test_backward():
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
grads = backward_net(x, y)
print("grads:", grads)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads