
more complex basic case

pull/15780/head
chenfei 4 years ago
commit ae87a8ace3
15 changed files with 317 additions and 113 deletions
  1. tests/st/control/inner/test_011_if_in_while.py (+21 -12)
  2. tests/st/control/inner/test_012_if_in_for.py (+17 -9)
  3. tests/st/control/inner/test_020_while_in_if.py (+16 -8)
  4. tests/st/control/inner/test_101_if_after_while.py (+19 -8)
  5. tests/st/control/inner/test_102_if_after_for.py (+1 -0)
  6. tests/st/control/inner/test_111_if_after_if_in_while.py (+16 -8)
  7. tests/st/control/inner/test_120_if_after_while_in_if.py (+16 -9)
  8. tests/st/control/inner/test_121_if_after_while_in_while.py (+66 -9)
  9. tests/st/control/inner/test_122_if_after_while_in_for.py (+15 -8)
  10. tests/st/control/inner/test_200_while_after_if.py (+15 -8)
  11. tests/st/control/inner/test_210_while_after_if_in_if.py (+15 -8)
  12. tests/st/control/inner/test_211_while_after_if_in_while.py (+18 -8)
  13. tests/st/control/inner/test_212_while_after_if_in_for.py (+62 -9)
  14. tests/st/control/inner/test_220_while_after_while_in_if.py (+5 -1)
  15. tests/st/control/inner/test_230_while_after_for_in_if.py (+15 -8)
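
Every file in this list gets the same treatment, visible in the hunks that follow: the ForwardNet under test gains a Parameter that is written inside the loop or branch body (by direct assignment or F.assign), construct returns the parameter alongside the output, and the graph-mode and PyNative-mode runs each build their own net instance instead of sharing one. A minimal sketch of that shared pattern, condensed from the first diff below rather than any single file verbatim:

import numpy as np
from mindspore import nn, Tensor, context
from mindspore.common import dtype as mstype
from mindspore.common.parameter import Parameter
from mindspore.ops import functional as F


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)
        # New in this commit: a Parameter that is mutated inside control flow.
        self.weight = Parameter(Tensor(np.array(0), mstype.int32))

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            F.assign(self.weight, i)
            i = i + 1
        # The parameter is returned so its final value is part of the comparison.
        return out, self.weight


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    # Separate instances per mode, so parameter state written in one run
    # cannot leak into the other.
    context.set_context(mode=context.GRAPH_MODE)
    graph_out = ForwardNet(max_cycles=10)(x, y)
    context.set_context(mode=context.PYNATIVE_MODE)
    pynative_out = ForwardNet(max_cycles=10)(x, y)
    assert graph_out == pynative_out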

tests/st/control/inner/test_011_if_in_while.py (+21 -12)

@@ -18,6 +18,8 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.ops import functional as F
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,6 +30,7 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
@@ -35,8 +38,11 @@ class ForwardNet(nn.Cell):
while i < self.max_cycles:
if out <= 20:
out = x * y + out
# Using F.Assign here would throw a NameSpace error.
F.assign(self.weight, i)
self.weight = i
i = i + 1
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -51,27 +57,30 @@ class BackwardNet(nn.Cell):


def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
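
The hunk headers above reference a BackwardNet class whose body is never shown in this diff. Judging from the BackwardNetNoAssign class added later in this commit (test_121 and test_212), it presumably looks like the sketch below; treat the exact body as an assumption:

class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        # Gradients with respect to all inputs of the wrapped network.
        self.grad = C.GradOperation(get_all=True)

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads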

tests/st/control/inner/test_012_if_in_for.py (+17 -9)

@@ -18,6 +18,8 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter
from mindspore.ops import functional as F

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -27,13 +29,16 @@ class ForwardNet(nn.Cell):
super(ForwardNet, self).__init__()
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
for _ in range(self.max_cycles):
for i in range(self.max_cycles):
if out <= 20:
self.weight = out
F.assign(self.weight, i)
out = x * y + out
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -50,25 +55,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_020_while_in_if.py (+16 -8)

@@ -19,6 +19,8 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter
from mindspore.ops import functional as F

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -29,15 +31,18 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.i = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
i = self.i
if x > y:
while i < self.max_cycles:
self.weight = i
F.assign(self.weight, i)
out = x * y + out
i = i + 1
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -54,25 +59,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_101_if_after_while.py (+19 -8)

@@ -18,6 +18,8 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter
from mindspore.ops import functional as F

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,16 +30,21 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
while i < self.max_cycles:
F.assign(self.weight, i)
self.weight = i
out = x * y + out
i = i + 1
if out >= 20:
F.assign(self.weight, out)
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -54,25 +61,29 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_102_if_after_for.py (+1 -0)

@@ -23,6 +23,7 @@ from mindspore.common import dtype as mstype
grad_all = C.GradOperation(get_all=True)
context.set_context(device_target="Ascend")


def test_if_after_for():
class IfAfterForNet(nn.Cell):
def __init__(self):


tests/st/control/inner/test_111_if_after_if_in_while.py (+16 -8)

@@ -18,6 +18,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,17 +29,21 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
while i < self.max_cycles:
self.weight = i
if out <= 20:
self.weight = i
out = x * y + out
i = i + 1
if out >= 30:
self.weight = out
out = out - 30
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -55,25 +60,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_120_if_after_while_in_if.py (+16 -9)

@@ -19,6 +19,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -29,6 +30,7 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.i = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
@@ -37,9 +39,11 @@ class ForwardNet(nn.Cell):
while i < self.max_cycles:
out = x * y + out
i = i + 1
if out > 20:
self.weight = i
if out < 20:
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -56,25 +60,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_121_if_after_while_in_while.py (+66 -9)

@@ -19,6 +19,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -29,18 +30,22 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.i = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
i = self.i
while x < y:
self.weight = x
while i < self.max_cycles:
out = x * y + out
i = i + 1
self.weight = i
x = x + 1
if out > 20:
if out < 20:
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -57,25 +62,77 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads


class ForwardNetNoAssign(nn.Cell):
def __init__(self, max_cycles=10):
super(ForwardNetNoAssign, self).__init__()
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.i = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
i = self.i
while x < y:
while i < self.max_cycles:
out = x * y + out
i = i + 1
x = x + 1
if out < 20:
out = out - 20
return out


class BackwardNetNoAssign(nn.Cell):
def __init__(self, net):
super(BackwardNetNoAssign, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation(get_all=True)

def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
return grads


# This test case triggers an endless loop in the evaluator.
def test_backward_no_assign():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNetNoAssign(max_cycles=3)
graph_backward_net = BackwardNetNoAssign(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNetNoAssign(max_cycles=3)
pynative_backward_net = BackwardNetNoAssign(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
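
The NoAssign variant drops the parameter write, so the gradient comparison above covers only the gradients with respect to the inputs x and y (GradOperation(get_all=True)). If the gradient of the new weight parameter also needed checking, a wrapper along the following lines could work; this is a sketch assuming MindSpore's get_by_list option and ParameterTuple, not something this commit adds:

from mindspore import ParameterTuple


class BackwardNetWithParams(nn.Cell):
    def __init__(self, net):
        super(BackwardNetWithParams, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.params = ParameterTuple(net.trainable_params())
        # Return gradients w.r.t. the inputs and w.r.t. the net's parameters.
        self.grad = C.GradOperation(get_all=True, get_by_list=True)

    def construct(self, *inputs):
        input_grads, param_grads = self.grad(self.forward_net, self.params)(*inputs)
        return input_grads, param_grads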

tests/st/control/inner/test_122_if_after_while_in_for.py (+15 -8)

@@ -19,6 +19,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,6 +29,7 @@ class ForwardNet(nn.Cell):
super(ForwardNet, self).__init__()
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
@@ -35,9 +37,11 @@ class ForwardNet(nn.Cell):
while x < y:
out = x * y + out
x = x + 1
self.weight = x
if out > 20:
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -54,25 +58,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_200_while_after_if.py (+15 -8)

@@ -18,6 +18,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,16 +29,19 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
if out >= 20:
out = out - 20
self.weight = i
while i < self.max_cycles:
out = x * y + out
i = i + 1
return out
self.weight = i
return out, self.weight


class BackwardNet(nn.Cell):
@@ -54,25 +58,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_210_while_after_if_in_if.py (+15 -8)

@@ -18,6 +18,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,17 +29,20 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
if x < y:
if out >= 20:
self.weight = x
out = out - 20
while i < self.max_cycles:
self.weight = i
out = x * y + out
i = i + 1
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -55,25 +59,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_211_while_after_if_in_while.py (+18 -8)

@@ -18,6 +18,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,17 +29,23 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
while i < self.max_cycles:
self.weight = i
if out <= 20:
out = x * y + out
self.weight = out
i = i + 1
i = self.i
while i < self.max_cycles:
self.weight = i
out = out + 10
return out
i = i + 1
return out, self.weight


class BackwardNet(nn.Cell):
@@ -55,25 +62,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=10)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=10)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=10)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_212_while_after_if_in_for.py (+62 -9)

@@ -18,6 +18,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,17 +29,20 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
for _ in range(0, self.max_cycles):
if out <= 20:
self.weight = out
out = x * y + out
while i < self.max_cycles:
out = out + 10
i = i + 1
return out
self.weight = self.weight - i
return out, self.weight


class BackwardNet(nn.Cell):
@@ -55,26 +59,75 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(3), mstype.int32)
y = Tensor(np.array(5), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(3), mstype.int32)
y = Tensor(np.array(5), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads


class ForwardNetNoAssign(nn.Cell):
def __init__(self, max_cycles=10):
super(ForwardNetNoAssign, self).__init__()
self.max_cycles = max_cycles
self.i = Tensor(np.array(0), mstype.int32)
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
i = self.i
out = self.zero
for _ in range(0, self.max_cycles):
if out <= 20:
out = x * y + out
while i < self.max_cycles:
out = out + 10
i = i + 1
return out


class BackwardNetNoAssign(nn.Cell):
def __init__(self, net):
super(BackwardNetNoAssign, self).__init__(auto_prefix=False)
self.forward_net = net
self.grad = C.GradOperation(get_all=True)

def construct(self, *inputs):
grads = self.grad(self.forward_net)(*inputs)
return grads


def test_backward_no_assign():
x = Tensor(np.array(3), mstype.int32)
y = Tensor(np.array(5), mstype.int32)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_forward_net = ForwardNetNoAssign(max_cycles=3)
graph_backward_net = BackwardNetNoAssign(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNetNoAssign(max_cycles=3)
pynative_backward_net = BackwardNetNoAssign(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads

tests/st/control/inner/test_220_while_after_while_in_if.py (+5 -1)

@@ -19,6 +19,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -29,6 +30,7 @@ class ForwardNet(nn.Cell):
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.i = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
@@ -37,9 +39,11 @@ class ForwardNet(nn.Cell):
while i < self.max_cycles:
out = x * y + out
i = i + 1
self.weight = i
while out > 20:
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):


tests/st/control/inner/test_230_while_after_for_in_if.py (+15 -8)

@@ -19,6 +19,7 @@ from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context
from mindspore.common.parameter import Parameter

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")

@@ -28,15 +29,18 @@ class ForwardNet(nn.Cell):
super(ForwardNet, self).__init__()
self.max_cycles = max_cycles
self.zero = Tensor(np.array(0), mstype.int32)
self.weight = Parameter(Tensor(np.array(0), mstype.int32))

def construct(self, x, y):
out = self.zero
if x > y:
for _ in range(0, self.max_cycles):
self.weight = out
out = x * y + out
while out > 20:
self.weight = out
out = out - 20
return out
return out, self.weight


class BackwardNet(nn.Cell):
@@ -53,25 +57,28 @@ class BackwardNet(nn.Cell):
def test_forward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_out = forward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_mode_out = graph_forward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_out = forward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_mode_out = pynative_forward_net(x, y)
assert graph_mode_out == pynative_mode_out


def test_backward():
x = Tensor(np.array(1), mstype.int32)
y = Tensor(np.array(3), mstype.int32)
forward_net = ForwardNet(max_cycles=3)
backward_net = BackwardNet(forward_net)
# Graph Mode
context.set_context(mode=context.GRAPH_MODE)
graph_mode_grads = backward_net(x, y)
graph_forward_net = ForwardNet(max_cycles=3)
graph_backward_net = BackwardNet(graph_forward_net)
graph_mode_grads = graph_backward_net(x, y)
# Pynative Mode
context.set_context(mode=context.PYNATIVE_MODE)
pynative_mode_grads = backward_net(x, y)
pynative_forward_net = ForwardNet(max_cycles=3)
pynative_backward_net = BackwardNet(pynative_forward_net)
pynative_mode_grads = pynative_backward_net(x, y)
assert graph_mode_grads == pynative_mode_grads
