Browse Source

!30916 [DynamicShape]fix some problems of InferShapeForNopNode

Merge pull request !30916 from hanhuifeng/dynamic_nop
r1.7
i-robot Gitee 4 years ago
parent
commit
c23f5eebe4
No known key found for this signature in database GPG Key ID: 173E9B9CA92EEF8F
7 changed files with 68 additions and 9 deletions
  1. +2
    -0
      mindspore/ccsrc/backend/common/optimizer/helper.cc
  2. +1
    -0
      mindspore/ccsrc/include/common/utils/anfalgo.h
  3. +1
    -0
      mindspore/ccsrc/include/common/utils/utils.h
  4. +12
    -8
      mindspore/ccsrc/kernel/kernel.cc
  5. +1
    -1
      mindspore/ccsrc/kernel/kernel.h
  6. +19
    -0
      mindspore/ccsrc/utils/anfalgo.cc
  7. +32
    -0
      tests/st/ops/gpu/test_dynamic_ops.py

+ 2
- 0
mindspore/ccsrc/backend/common/optimizer/helper.cc View File

@@ -337,6 +337,7 @@ void HideNopNode(session::KernelGraph *const graph) {
MS_EXCEPTION_IF_NULL(cnode);
if (NeedHideNode(outputs, cnode, is_dynamic_graph)) {
common::AnfAlgo::SetNodeAttr(kAttrSkipNopOpAddr, MakeValue(true), cnode);
common::AnfAlgo::SetNodeAttr(kAttrSkipNopOpExecution, MakeValue(true), cnode);
} else {
new_nodes.push_back(cnode);
}
@@ -361,6 +362,7 @@ void RemoveNopNode(session::KernelGraph *const graph) {
// ignore nop node itself
if (NeedHideNode(outputs, cnode, is_dynamic_graph)) {
common::AnfAlgo::SetNodeAttr(kAttrSkipNopOpAddr, MakeValue(true), cnode);
common::AnfAlgo::SetNodeAttr(kAttrSkipNopOpExecution, MakeValue(true), cnode);
continue;
}
// Replace the input which is nop node


+ 1
- 0
mindspore/ccsrc/include/common/utils/anfalgo.h View File

@@ -145,6 +145,7 @@ class COMMON_EXPORT AnfAlgo {
static bool IsInplaceNode(const AnfNodePtr &node, const string &type);
static bool IsGetNext(const NotNull<AnfNodePtr> &node);
static bool IsNeedSkipNopOpAddr(const AnfNodePtr &node);
static bool IsNeedSkipNopOpExecution(const AnfNodePtr &node);
static FuncGraphPtr GetValueNodeFuncGraph(const AnfNodePtr &node);
static bool IsSwitchCall(const CNodePtr &call_node);
static bool IsScalarInput(const CNodePtr &cnode, size_t index);


+ 1
- 0
mindspore/ccsrc/include/common/utils/utils.h View File

@@ -521,6 +521,7 @@ constexpr auto kAttrInputSize = "input_size";
constexpr auto kAttrDstType = "dst_type";
constexpr auto kAttrDump = "dump";
constexpr auto kAttrSkipNopOpAddr = "skip_nop_op_addr";
constexpr auto kAttrSkipNopOpExecution = "skip_nop_op_execution";
constexpr auto kAttrFixedInputFormat = "fixed_input_format";
constexpr auto kAttrFixedOutputFormat = "fixed_output_format";
constexpr auto kAttrFixedInputDeviceShape = "fixed_input_device_shape";


+ 12
- 8
mindspore/ccsrc/kernel/kernel.cc View File

@@ -63,7 +63,7 @@ void KernelMod::InferShape() {
MS_EXCEPTION_IF_NULL(real_input);
auto cnode_input = cnode->input(i + 1);
MS_EXCEPTION_IF_NULL(cnode_input);
InferShapeForNopNode(&real_input);
InferShapeForNopNode(real_input);
if (depend_list_.find(i) != depend_list_.end()) {
auto pre_node_with_index = common::AnfAlgo::GetPrevNodeOutput(cnode, i);
bool skip_nop_node = !context->get_param<bool>(MS_CTX_ENABLE_MINDRT);
@@ -138,24 +138,28 @@ bool KernelMod::InferShapeForDefiniteOutputNode(const CNodePtr &cnode) {
return true;
}

void KernelMod::InferShapeForNopNode(AnfNodePtr *input_node) {
MS_EXCEPTION_IF_NULL(*input_node);
if (!common::AnfAlgo::IsNopNode(*input_node) || !common::AnfAlgo::IsDynamicShape(*input_node)) {
void KernelMod::InferShapeForNopNode(const AnfNodePtr &input_node) {
MS_EXCEPTION_IF_NULL(input_node);
if (!common::AnfAlgo::IsNopNode(input_node) || !common::AnfAlgo::IsDynamicShape(input_node)) {
MS_LOG(INFO) << "Input node is not a nop node, no need infer.";
return;
}
if (!common::AnfAlgo::IsNeedSkipNopOpExecution(input_node)) {
MS_LOG(INFO) << "The Nop node need execution, no need the InferShapeForNopNode.";
return;
}
MS_LOG(INFO) << "Infer shape for nop node.";
std::stack<AnfNodePtr> nop_road;
nop_road.push(*input_node);
nop_road.push(input_node);

auto in_node = input_node;
/*lint -e716*/
while (true) {
auto input_node_with_idx = common::AnfAlgo::GetPrevNodeOutput(*input_node, 0);
auto in_node = input_node_with_idx.first;
auto input_node_with_idx = common::AnfAlgo::GetPrevNodeOutput(in_node, 0);
in_node = input_node_with_idx.first;
MS_EXCEPTION_IF_NULL(in_node);
if (common::AnfAlgo::IsNopNode(in_node)) {
nop_road.push(in_node);
*input_node = in_node;
} else {
break;
}


+ 1
- 1
mindspore/ccsrc/kernel/kernel.h View File

@@ -239,7 +239,7 @@ class KernelMod {
std::set<uint32_t> depend_list_;

private:
void InferShapeForNopNode(AnfNodePtr *input_node);
void InferShapeForNopNode(const AnfNodePtr &input_node);
bool InferShapeForDefiniteOutputNode(const CNodePtr &cnode);

std::vector<AddressPtr> inputs_addr_;


+ 19
- 0
mindspore/ccsrc/utils/anfalgo.cc View File

@@ -961,6 +961,25 @@ bool AnfAlgo::IsNeedSkipNopOpAddr(const AnfNodePtr &node) {
return GetValue<bool>(skip_nop_op_addr_attr);
}

// Returns true iff `node` is a CNode whose primitive carries the
// kAttrSkipNopOpExecution attribute with the value `true`; any missing
// link in that chain (non-CNode, no primitive, attribute absent) means
// the node's execution must not be skipped.
bool AnfAlgo::IsNeedSkipNopOpExecution(const AnfNodePtr &node) {
  MS_EXCEPTION_IF_NULL(node);
  // Only CNodes can carry a primitive, hence the attribute.
  if (!node->isa<CNode>()) {
    return false;
  }
  const auto prim = AnfAlgo::GetCNodePrimitive(node);
  if (prim == nullptr) {
    return false;
  }
  const auto skip_attr = prim->GetAttr(kAttrSkipNopOpExecution);
  return (skip_attr != nullptr) && GetValue<bool>(skip_attr);
}

FuncGraphPtr AnfAlgo::GetValueNodeFuncGraph(const AnfNodePtr &node) {
MS_EXCEPTION_IF_NULL(node);
auto value_node = node->cast<ValueNodePtr>();


+ 32
- 0
tests/st/ops/gpu/test_dynamic_ops.py View File

@@ -258,3 +258,35 @@ def test_dynamic_reduce_sum():
inputs = data_list[0]
output_cmp = np.sum(inputs[0], inputs[1][0])
assert np.allclose(output.asnumpy(), output_cmp, rtol=1.0e-4, atol=1.0e-4)


class NopNet(nn.Cell):
    """Small network built from nop-style ops (squeeze / expand_dims)."""

    def construct(self, x):
        # Drop the size-1 axis, re-insert it at position 1, then subtract:
        # the two operands broadcast against each other.
        squeezed = ops.squeeze(x)
        expanded = ops.expand_dims(squeezed, 1)
        return ops.sub(expanded, squeezed)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_dynamic_nop():
    """
    Feature: Test Nop.
    Description: The shape of inputs is dynamic.
    Expectation: Assert that results are consistent with fixed shape.
    """
    dtype = np.float32
    # One single-column sample per batch size, so only the first dim varies.
    data_list = [(np.random.rand(rows, 1).astype(dtype),) for rows in (2, 64)]
    column_names = get_columns(len(data_list[0]))
    dataset = ds.GeneratorDataset(data_list, column_names, shuffle=False)
    # Mark the first dimension of the first column as dynamic (None).
    dataset.set_dynamic_columns(columns={column_names[0]: [None, 1]})
    net = NopNet()
    dynamic_out = dynamic_shape_sink_process(net, dataset)
    fixed_out = fixed_shape_process(net, dataset)
    assert compare(dynamic_out, fixed_out)

Loading…
Cancel
Save