
!15577 [ME][ControlFlow] Add some control sink testcases

From: @Margaret_wangrui
Reviewed-by: @ginfung,@zh_qh
Signed-off-by: @zh_qh
pull/15577/MERGE
mindspore-ci-bot (Gitee) · 4 years ago
commit 5a48a08e34
2 changed files with 156 additions and 0 deletions
  1. tests/st/control/inner/test_002_single_for.py  +104  -0
  2. tests/st/control/inner/test_030_for_in_if.py  +52  -0

+104 -0  tests/st/control/inner/test_002_single_for.py

@@ -12,8 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
from mindspore import context
from mindspore import Tensor, nn
from mindspore.common.parameter import Parameter
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from mindspore.common import dtype as mstype
@@ -107,3 +109,105 @@ def test_single_for_02():

assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res


def test_single_for_03():
    class SingleForNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.mul = P.Mul()
            self.add = P.Add()
            self.sub = P.Sub()
            self.assign = P.Assign()
            param_a = np.full((1,), 5, dtype=np.float32)
            self.param_a = Parameter(Tensor(param_a), name='a')
            param_b = np.full((1,), 2, dtype=np.float32)
            self.param_b = Parameter(Tensor(param_b), name='b')

        def func(self, x):
            x = self.mul(x, 2)
            for _ in range(0, 5):
                x = self.add(x, x)
                self.param_b = self.param_b + 1
            return x - self.param_b

        def construct(self, x, y):
            self.assign(self.param_a, x + self.param_a)
            z = self.func(x)
            x = self.param_a + y + z
            return x, self.param_b

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, *inputs):
            return grad_all(self.net)(*inputs)

    x = Tensor([2], mstype.int32)
    y = Tensor([5], mstype.int32)

    # graph mode
    context.set_context(mode=context.GRAPH_MODE)
    single_for_net = SingleForNet()
    net = GradNet(single_for_net)
    graph_forward_res = single_for_net(x, y)
    graph_backward_res = net(x, y)

    # pynative mode
    context.set_context(mode=context.PYNATIVE_MODE)
    single_for_net = SingleForNet()
    net = GradNet(single_for_net)
    pynative_forward_res = single_for_net(x, y)
    pynative_backward_res = net(x, y)

    assert graph_forward_res == pynative_forward_res
    assert graph_backward_res == pynative_backward_res


def test_single_for_04():
    class SingleForNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.mul = P.Mul()
            self.add = P.Add()
            self.sub = P.Sub()
            self.assign = P.Assign()
            param_a = np.full((1,), 5, dtype=np.float32)
            self.param_a = Parameter(Tensor(param_a), name='a')
            param_b = np.full((1,), 2, dtype=np.float32)
            self.param_b = Parameter(Tensor(param_b), name='b')

        def construct(self, x):
            self.assign(self.param_a, x + self.param_a)
            for _ in range(1):
                self.param_b = x - self.param_a
            return self.param_b

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, *inputs):
            return grad_all(self.net)(*inputs)

    x = Tensor([2], mstype.int32)

    # graph mode
    context.set_context(mode=context.GRAPH_MODE)
    single_for_net = SingleForNet()
    net = GradNet(single_for_net)
    graph_forward_res = single_for_net(x)
    graph_backward_res = net(x)

    # pynative mode
    context.set_context(mode=context.PYNATIVE_MODE)
    single_for_net = SingleForNet()
    net = GradNet(single_for_net)
    pynative_forward_res = single_for_net(x)
    pynative_backward_res = net(x)

    assert graph_forward_res == pynative_forward_res
    assert graph_backward_res == pynative_backward_res

+52 -0  tests/st/control/inner/test_030_for_in_if.py

@@ -118,3 +118,55 @@ def test_for_in_if_02():

assert graph_forward_res == pynative_forward_res
assert graph_backward_res == pynative_backward_res


def test_for_in_if_03():
    class ForInIfNet(nn.Cell):
        def __init__(self):
            super().__init__()
            self.mul = P.Mul()
            self.add = P.Add()
            param_a = np.full((1,), 5, dtype=np.float32)
            self.param_a = Parameter(Tensor(param_a), name='a')
            param_b = np.full((1,), 4, dtype=np.float32)
            self.param_b = Parameter(Tensor(param_b), name='b')

        def construct(self, x):
            y = x + self.param_b
            if self.param_a > self.param_b:
                x = self.mul(x, 2)
                for i in range(-1, 5):
                    x = self.add(i, x)
                    self.param_b += 1
            elif y > x:
                y = self.param_a * y
            else:
                x = self.param_b * x
            return x, y

    class GradNet(nn.Cell):
        def __init__(self, net):
            super(GradNet, self).__init__()
            self.net = net

        def construct(self, *inputs):
            return grad_all(self.net)(*inputs)

    x = Tensor([10], mstype.int32)

    # graph mode
    context.set_context(mode=context.GRAPH_MODE)
    for_in_if_net = ForInIfNet()
    net = GradNet(for_in_if_net)
    graph_forward_res = for_in_if_net(x)
    graph_backward_res = net(x)

    # pynative mode
    context.set_context(mode=context.PYNATIVE_MODE)
    for_in_if_net = ForInIfNet()
    net = GradNet(for_in_if_net)
    pynative_forward_res = for_in_if_net(x)
    pynative_backward_res = net(x)

    assert graph_forward_res == pynative_forward_res
    assert graph_backward_res == pynative_backward_res
