@@ -211,3 +211,48 @@ def test_single_for_04():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_single_for_05():
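+    # A single for loop that updates Parameters through Assign; forward and
+    # backward results must match between graph mode and pynative mode.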
+    class SingleForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(np.array(5), mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(np.array(2), mstype.int32), name='b')
+
+        def construct(self, x):
+            self.assign(self.param_a, x + self.param_a)
+            for _ in range(0, 3):
+                self.assign(self.param_b, x - self.param_a)
+            return x
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([6], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    single_for_net = SingleForNet()
+    net = GradNet(single_for_net)
+    graph_forward_res = single_for_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    single_for_net = SingleForNet()
+    net = GradNet(single_for_net)
+    pynative_forward_res = single_for_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -219,3 +219,54 @@ def test_for_in_if_04():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_in_if_05():
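+    # A for loop inside an if branch of a helper method; graph and pynative
+    # mode must agree on forward and backward results.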
+    class ForInIfNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(4, mstype.int32), name='b')
+            self.assign = P.Assign()
+
+        def construct(self, x):
+            out = self.param_a
+            x = self.func(x)
+            out *= x
+            return out
+
+        def func(self, x):
+            if self.param_a > self.param_b:
+                self.assign(self.param_a, self.param_b + self.param_a)
+                for _ in range(0, 4):
+                    self.param_a += 1
+                self.assign(self.param_b, self.param_b - 4)
+            x += self.param_b
+            return x
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor(5, mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_in_if_net = ForInIfNet()
+    net = GradNet(for_in_if_net)
+    graph_forward_res = for_in_if_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_in_if_net = ForInIfNet()
+    net = GradNet(for_in_if_net)
+    pynative_forward_res = for_in_if_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_in_while():
+def test_for_in_while_01():
     class ForInWhileNet(nn.Cell):
         def __init__(self):
             super().__init__()
@@ -73,3 +73,52 @@ def test_for_in_while():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_in_while_02():
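+    # A for loop nested inside a while loop, with Assign side effects on the
+    # loop condition's Parameters.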
+    class ForInWhileNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(7, mstype.int32), name='b')
+
+        def construct(self, x):
+            self.assign(self.param_a, x + self.param_a)
+            while self.param_a > self.param_b:
+                for _ in range(0, 3):
+                    x = self.add(x, self.param_a + self.param_b)
+                    self.assign(self.param_b, self.param_b + 1)
+                y = self.sub(x, self.param_b)
+                self.assign(self.param_a, y)
+            return x
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([2], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_in_while_net = ForInWhileNet()
+    net = GradNet(for_in_while_net)
+    graph_forward_res = for_in_while_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_in_while_net = ForInWhileNet()
+    net = GradNet(for_in_while_net)
+    pynative_forward_res = for_in_while_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_in_for():
+def test_for_in_for_01():
     class ForInForNet(nn.Cell):
         def __init__(self):
             super().__init__()
@@ -74,3 +74,51 @@ def test_for_in_for():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_in_for_02():
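+    # Nested for loops that update Parameters both directly and via Assign.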
+    class ForInForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(11, mstype.int32), name='b')
+
+        def construct(self, x):
+            for _ in range(0, 10):
+                x = x * 2
+                self.assign(self.param_a, x + self.param_a)
+                for _ in range(0, 5):
+                    x = self.add(x, x)
+                    self.param_b += 1
+            y = self.sub(x, self.param_b + self.param_a)
+            return y
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([2], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_in_for_net = ForInForNet()
+    net = GradNet(for_in_for_net)
+    graph_forward_res = for_in_for_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_in_for_net = ForInForNet()
+    net = GradNet(for_in_for_net)
+    pynative_forward_res = for_in_for_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -24,7 +24,7 @@ grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
 
-def test_if_after_for():
+def test_if_after_for_01():
     class IfAfterForNet(nn.Cell):
         def __init__(self):
             super().__init__()
@@ -76,3 +76,57 @@ def test_if_after_for():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_if_after_for_02():
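+    # An if statement that follows a for loop over the same Parameters.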
+    class IfAfterForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.relu = nn.ReLU()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(11, mstype.int32), name='b')
+
+        def construct(self, x):
+            self.assign(self.param_a, x + self.param_a)
+            y = self.add(x, self.param_b)
+            for _ in range(0, 2):
+                x = self.sub(x, 2)
+                self.assign(self.param_b, self.param_a + self.param_b - x)
+            self.param_b = self.add(self.param_b, 2)
+            if x < self.param_b:
+                x = y - x
+            y = self.mul(x, self.param_a)
+            z = self.relu(x + y)
+            return z
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    if_after_for_net = IfAfterForNet()
+    net = GradNet(if_after_for_net)
+    graph_forward_res = if_after_for_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    if_after_for_net = IfAfterForNet()
+    net = GradNet(if_after_for_net)
+    pynative_forward_res = if_after_for_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_after_for():
+def test_for_after_for_01():
     class ForAfterForNet(nn.Cell):
         def __init__(self):
             super().__init__()
@@ -77,3 +77,56 @@ def test_for_after_for():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_after_for_02():
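+    # Two sequential for loops; the second consumes Parameters written by
+    # the first.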
+    class ForAfterForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            param_a = np.full((1,), 5, dtype=np.int32)
+            self.param_a = Parameter(Tensor(param_a), name='a')
+            param_b = np.full((1,), 11, dtype=np.int32)
+            self.param_b = Parameter(Tensor(param_b), name='b')
+
+        def construct(self, x):
+            self.assign(self.param_a, x + self.param_a)
+            y = self.add(x, self.param_a)
+            for _ in range(0, 2):
+                x = self.sub(x, 3)
+                self.assign(self.param_b, x + self.param_b)
+            self.param_a = x + y
+            for _ in range(0, 5):
+                y = self.mul(x, self.param_a)
+                x = x + self.param_a
+            return y
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_after_for_net = ForAfterForNet()
+    net = GradNet(for_after_for_net)
+    graph_forward_res = for_after_for_net(x)
+    graph_backward_res = net(x)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_after_for_net = ForAfterForNet()
+    net = GradNet(for_after_for_net)
+    pynative_forward_res = for_after_for_net(x)
+    pynative_backward_res = net(x)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_after_while_in_if():
+def test_for_after_while_in_if_01():
    class ForAfterWhileInIfNet(nn.Cell):
        def __init__(self):
            super().__init__()
@@ -90,3 +90,63 @@ def test_for_after_while_in_if():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_after_while_in_if_02():
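+    # A while loop inside an if branch, followed by a standalone for loop.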
+    class ForAfterWhileInIfNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            param_a = np.full((1,), 5, dtype=np.int32)
+            self.param_a = Parameter(Tensor(param_a), name='a')
+            param_b = np.full((1,), 2, dtype=np.int32)
+            self.param_b = Parameter(Tensor(param_b), name='b')
+            param_c = np.full((1,), 11, dtype=np.int32)
+            self.param_c = Parameter(Tensor(param_c), name='c')
+
+        def construct(self, x, y):
+            self.assign(self.param_a, x + self.param_a)
+            y = self.add(y, self.param_b)
+            if (self.param_b > (y - self.param_a)) and (self.param_b != self.param_a):
+                x = y - self.param_a - self.param_b
+                while self.param_a >= x:
+                    self.assign(self.param_c, self.param_a + 2)
+                    x = x + 2
+            self.param_b = self.sub(y, self.param_b)
+            x = self.mul(self.param_b, self.param_c)
+            for _ in range(0, 4):
+                self.assign(self.param_b, y + self.param_b - x)
+            y = x + self.param_a - self.param_b
+            return y
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([11], mstype.int32)
+    y = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_after_while_in_if_net = ForAfterWhileInIfNet()
+    net = GradNet(for_after_while_in_if_net)
+    graph_forward_res = for_after_while_in_if_net(x, y)
+    graph_backward_res = net(x, y)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_after_while_in_if_net = ForAfterWhileInIfNet()
+    net = GradNet(for_after_while_in_if_net)
+    pynative_forward_res = for_after_while_in_if_net(x, y)
+    pynative_backward_res = net(x, y)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_after_while_in_for():
+def test_for_after_while_in_for_01():
    class ForAfterWhileInForNet(nn.Cell):
        def __init__(self):
            super().__init__()
@@ -91,3 +91,63 @@ def test_for_after_while_in_for():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_after_while_in_for_02():
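+    # A while loop nested in a for loop, followed by a standalone for loop.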
+    class ForAfterWhileInForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.div = P.Div()
+            self.relu = nn.ReLU()
+            self.assign = P.Assign()
+            param_a = np.full((1,), 5, dtype=np.int32)
+            self.param_a = Parameter(Tensor(param_a), name='a')
+            param_b = np.full((1,), 2, dtype=np.int32)
+            self.param_b = Parameter(Tensor(param_b), name='b')
+            param_c = np.full((1,), 30, dtype=np.int32)
+            self.param_c = Parameter(Tensor(param_c), name='c')
+
+        def construct(self, x, y):
+            self.assign(self.param_a, x + self.param_a)
+            y = self.add(y, self.param_b)
+            for _ in range(0, 10):
+                self.param_b = self.add(self.param_c, self.param_b)
+                while self.param_c > self.param_b:
+                    self.assign(self.param_b, self.param_b + self.param_a + 2)
+            self.param_b = self.sub(y, self.param_b)
+            x = self.mul(self.param_b, self.param_c)
+            for _ in range(0, 4):
+                y = y + self.param_b
+                self.assign(self.param_b, x * 3 - y)
+            return x
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([11], mstype.int32)
+    y = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_after_while_in_for_net = ForAfterWhileInForNet()
+    net = GradNet(for_after_while_in_for_net)
+    graph_forward_res = for_after_while_in_for_net(x, y)
+    graph_backward_res = net(x, y)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_after_while_in_for_net = ForAfterWhileInForNet()
+    net = GradNet(for_after_while_in_for_net)
+    pynative_forward_res = for_after_while_in_for_net(x, y)
+    pynative_backward_res = net(x, y)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_after_for_in_while():
+def test_for_after_for_in_while_01():
    class ForAfterForInWhileNet(nn.Cell):
        def __init__(self):
            super().__init__()
@@ -86,3 +86,56 @@ def test_for_after_for_in_while():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_after_for_in_while_02():
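+    # A for loop nested in a while loop, followed by a standalone for loop.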
+    class ForAfterForInWhileNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(2, mstype.int32), name='b')
+            self.param_c = Parameter(Tensor(-10, mstype.int32), name='c')
+
+        def construct(self, x, y):
+            while self.param_c > x:
+                self.param_b = self.add(self.param_c, self.param_b)
+                for _ in range(0, 20):
+                    self.assign(self.param_b, self.param_a + 2)
+                    self.assign(self.param_c, self.param_c - 1)
+                x = x + 2
+            for _ in range(0, 4):
+                self.assign(self.param_c, y + self.param_b)
+                x = self.param_a - x - y
+            return x
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([11], mstype.int32)
+    y = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_after_for_in_while_net = ForAfterForInWhileNet()
+    net = GradNet(for_after_for_in_while_net)
+    graph_forward_res = for_after_for_in_while_net(x, y)
+    graph_backward_res = net(x, y)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_after_for_in_while_net = ForAfterForInWhileNet()
+    net = GradNet(for_after_for_in_while_net)
+    pynative_forward_res = for_after_for_in_while_net(x, y)
+    pynative_backward_res = net(x, y)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res
@@ -23,7 +23,7 @@ from mindspore.common import dtype as mstype
 grad_all = C.GradOperation(get_all=True)
 context.set_context(device_target="Ascend")
 
-def test_for_after_for_in_for():
+def test_for_after_for_in_for_01():
    class ForAfterForInForNet(nn.Cell):
        def __init__(self):
            super().__init__()
@@ -45,7 +45,7 @@ def test_for_after_for_in_for():
             for _ in range(0, 4):
                 self.param_b = self.add(self.param_c, self.param_b)
                 for _ in range(0, 8):
-                    self.param_b = self.param_a + j
+                    self.param_b = self.param_a + x
             self.param_c = self.param_a * self.param_b
             for _ in range(0, 3):
@@ -82,3 +82,59 @@ def test_for_after_for_in_for():
     assert graph_forward_res == pynative_forward_res
     assert graph_backward_res == pynative_backward_res
+
+
+def test_for_after_for_in_for_02():
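+    # Nested for loops followed by a standalone for loop; construct calls
+    # self.relu, so ReLU is instantiated in __init__ below.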
+    class ForAfterForInForNet(nn.Cell):
+        def __init__(self):
+            super().__init__()
+            self.relu = nn.ReLU()  # required by construct below
+            self.mul = P.Mul()
+            self.add = P.Add()
+            self.sub = P.Sub()
+            self.div = P.Div()
+            self.assign = P.Assign()
+            self.param_a = Parameter(Tensor(5, mstype.int32), name='a')
+            self.param_b = Parameter(Tensor(2, mstype.int32), name='b')
+            self.param_c = Parameter(Tensor(20, mstype.int32), name='c')
+
+        def construct(self, x, y):
+            for _ in range(0, 6):
+                self.param_b = self.add(self.param_c, self.param_b)
+                for _ in range(0, 2):
+                    self.assign(self.param_b, self.param_a + x)
+                    self.assign(self.param_c, self.param_a * self.param_b)
+            for _ in range(0, 3):
+                y = y + self.param_b
+                x = self.relu(self.param_c * 3)
+                self.assign(self.param_b, x - y)
+            z = y + self.param_b
+            return z
+
+    class GradNet(nn.Cell):
+        def __init__(self, net):
+            super(GradNet, self).__init__()
+            self.net = net
+
+        def construct(self, *inputs):
+            return grad_all(self.net)(*inputs)
+
+    x = Tensor([11], mstype.int32)
+    y = Tensor([7], mstype.int32)
+
+    # graph mode
+    context.set_context(mode=context.GRAPH_MODE)
+    for_after_for_in_for_net = ForAfterForInForNet()
+    net = GradNet(for_after_for_in_for_net)
+    graph_forward_res = for_after_for_in_for_net(x, y)
+    graph_backward_res = net(x, y)
+
+    # pynative mode
+    context.set_context(mode=context.PYNATIVE_MODE)
+    for_after_for_in_for_net = ForAfterForInForNet()
+    net = GradNet(for_after_for_in_for_net)
+    pynative_forward_res = for_after_for_in_for_net(x, y)
+    pynative_backward_res = net(x, y)
+
+    assert graph_forward_res == pynative_forward_res
+    assert graph_backward_res == pynative_backward_res