
Add inner test cases of control flow

Add test cases (2)
Branch: pull/14641/head · Author: chenfei_mindspore (4 years ago) · Commit: ac6f5983b5
17 changed files with 1195 additions and 0 deletions:

  1. tests/st/control/inner/test_001_single_while.py (+60, -0)
  2. tests/st/control/inner/test_011_if_in_while.py (+67, -0)
  3. tests/st/control/inner/test_011_if_in_while_break.py (+114, -0)
  4. tests/st/control/inner/test_012_if_in_for.py (+64, -0)
  5. tests/st/control/inner/test_012_if_in_for_break.py (+65, -0)
  6. tests/st/control/inner/test_020_while_in_if.py (+68, -0)
  7. tests/st/control/inner/test_101_if_after_while.py (+68, -0)
  8. tests/st/control/inner/test_111_if_after_if_in_while.py (+69, -0)
  9. tests/st/control/inner/test_120_if_after_while_in_if.py (+70, -0)
  10. tests/st/control/inner/test_121_if_after_while_in_while.py (+71, -0)
  11. tests/st/control/inner/test_122_if_after_while_in_for.py (+68, -0)
  12. tests/st/control/inner/test_200_while_after_if.py (+68, -0)
  13. tests/st/control/inner/test_210_while_after_if_in_if.py (+69, -0)
  14. tests/st/control/inner/test_211_while_after_if_in_while.py (+69, -0)
  15. tests/st/control/inner/test_212_while_after_if_in_for.py (+68, -0)
  16. tests/st/control/inner/test_220_while_after_while_in_if.py (+70, -0)
  17. tests/st/control/inner/test_230_while_after_for_in_if.py (+67, -0)

tests/st/control/inner/test_001_single_while.py (+60, -0)

@@ -0,0 +1,60 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=True, device_target="Ascend")


class ForwardNet(nn.Cell):
    def construct(self, x, y):
        y = y + 10
        while x < y:
            x = (x + 2) * (y - 9)
            y = y + 2
        x = x + 5
        return x


class BackwardNet(nn.Cell):
    def __init__(self, forward_net):
        super(BackwardNet, self).__init__()
        self.forward_net = forward_net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    c1 = Tensor([0], mstype.int32)
    c2 = Tensor([0], mstype.int32)
    expect = Tensor([75], mstype.int32)
    forward_net = ForwardNet()
    output = forward_net(c1, c2)
    assert expect == output


def test_backward():
    c1 = Tensor([0], mstype.int32)
    c2 = Tensor([0], mstype.int32)
    forward_net = ForwardNet()
    backward_net = BackwardNet(forward_net)
    grads = backward_net(c1, c2)
    print("grads:", grads)
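

# Sanity sketch: unrolling ForwardNet.construct with plain Python ints shows where
# the expected value 75 in test_forward comes from.
def _reference_single_while(x, y):
    # (x, y) evolves (0, 10) -> (2, 12) -> (12, 14) -> (70, 16); then 70 + 5 = 75.
    y = y + 10
    while x < y:
        x = (x + 2) * (y - 9)
        y = y + 2
    return x + 5


assert _reference_single_while(0, 0) == 75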

tests/st/control/inner/test_011_if_in_while.py (+67, -0)

@@ -0,0 +1,67 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_011_if_in_while_break.py (+114, -0)

@@ -0,0 +1,114 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            out = x * y + out
            if out > 20:
                break
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


class ForwardNetReplaceBreak(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNetReplaceBreak, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles and out <= 20:
            out = x * y + out
            i = i + 1
        return out


class BackwardNetReplaceBreak(nn.Cell):
    def __init__(self, net):
        super(BackwardNetReplaceBreak, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


# Problem: Exceed function call depth limit 1000.
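# Note: if the call-depth limit is the only blocker, it can usually be raised via
# the graph context, e.g. context.set_context(max_call_depth=10000); whether the
# gradient graph then compiles still depends on the loop structure and memory, so
# this is a workaround to try, not a guaranteed fix.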
def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)


def test_forward_replace_break():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNetReplaceBreak(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


# Problem: Exceed function call depth limit 1000.
def test_backward_replace_break():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNetReplaceBreak(max_cycles=10)
    backward_net = BackwardNetReplaceBreak(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_012_if_in_for.py (+64, -0)

@@ -0,0 +1,64 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        for _ in range(self.max_cycles):
            if out <= 20:
                out = x * y + out
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_012_if_in_for_break.py (+65, -0)

@@ -0,0 +1,65 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        for _ in range(self.max_cycles):
            out = x * y + out
            if out > 20:
                break
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_020_while_in_if.py (+68, -0)

@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_101_if_after_while.py (+68, -0)

@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        if out >= 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_111_if_after_if_in_while.py (+69, -0)

@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        if out >= 30:
            out = out - 30
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_120_if_after_while_in_if.py (+70, -0)

@@ -0,0 +1,70 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_121_if_after_while_in_while.py (+71, -0)

@@ -0,0 +1,71 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        while x < y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
            x = x + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_122_if_after_while_in_for.py (+68, -0)

@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        while x < y:
            for _ in range(0, self.max_cycles):
                out = x * y + out
            x = x + 1
        if out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_200_while_after_if.py (+68, -0)

@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        if out >= 20:
            out = out - 20
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_210_while_after_if_in_if.py (+69, -0)

@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        if x < y:
            if out >= 20:
                out = out - 20
        while i < self.max_cycles:
            out = x * y + out
            i = i + 1
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_211_while_after_if_in_while.py (+69, -0)

@@ -0,0 +1,69 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        while i < self.max_cycles:
            if out <= 20:
                out = x * y + out
            i = i + 1
        while i < self.max_cycles:
            out = out + 10
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_212_while_after_if_in_for.py (+68, -0)

@@ -0,0 +1,68 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.i = Tensor(np.array(0), mstype.int32)
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        i = self.i
        out = self.zero
        for _ in range(0, self.max_cycles):
            if out <= 20:
                out = x * y + out
        while i < self.max_cycles:
            out = out + 10
            i = i + 1  # advance the counter so this trailing loop terminates
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=10)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_220_while_after_while_in_if.py (+70, -0)

@@ -0,0 +1,70 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)
        self.i = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        i = self.i
        if x > y:
            while i < self.max_cycles:
                out = x * y + out
                i = i + 1
        while out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)

tests/st/control/inner/test_230_while_after_for_in_if.py (+67, -0)

@@ -0,0 +1,67 @@
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
from mindspore.common import dtype as mstype
from mindspore import nn
from mindspore import Tensor
from mindspore.ops import composite as C
from mindspore import context

context.set_context(mode=context.GRAPH_MODE, save_graphs=False, device_target="Ascend")


class ForwardNet(nn.Cell):
    def __init__(self, max_cycles=10):
        super(ForwardNet, self).__init__()
        self.max_cycles = max_cycles
        self.zero = Tensor(np.array(0), mstype.int32)

    def construct(self, x, y):
        out = self.zero
        if x > y:
            for _ in range(0, self.max_cycles):
                out = x * y + out
        while out > 20:
            out = out - 20
        return out


class BackwardNet(nn.Cell):
    def __init__(self, net):
        super(BackwardNet, self).__init__(auto_prefix=False)
        self.forward_net = net
        self.grad = C.GradOperation()

    def construct(self, *inputs):
        grads = self.grad(self.forward_net)(*inputs)
        return grads


def test_forward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    out = forward_net(x, y)
    print("forward out:", out)


def test_backward():
    x = Tensor(np.array(1), mstype.int32)
    y = Tensor(np.array(3), mstype.int32)
    forward_net = ForwardNet(max_cycles=3)
    backward_net = BackwardNet(forward_net)
    grads = backward_net(x, y)
    print("grads:", grads)
