@@ -215,9 +215,9 @@ void AtomicCleanInsertter::CreateInplaceAssignNodeAndCorrectReturn(const FuncGra
   }
   auto inplace_assign_node =
-    CreateCNode({NewValueNode(std::make_shared<Primitive>("InplaceAssign")), new_parameter, atomic_add_node_, out_node},
-                sub_graph, {.format = GetFormat(out_node), .shape = GetShape(out_node), .type = GetType(out_node)});
-  AnfAlgo::SetNodeAttr("fake_output", MakeValue(fake_out), inplace_assign_node);
+    CreateCNode({NewValueNode(prim::kPrimInplaceAssign), new_parameter, atomic_add_node_, out_node}, sub_graph,
+                {.format = GetFormat(out_node), .shape = GetShape(out_node), .type = GetType(out_node)});
+  SetNodeAttrSafely("fake_output", MakeValue(fake_out), inplace_assign_node);
   CNodePtr new_out_node;
   if (real_output_num_ > 2) {
@@ -269,7 +269,7 @@ void AtomicCleanInsertter::ProcessOriginCNode(const AnfNodePtr &composite_node,
   }
   // Add atomic attribute to reducesum node.
-  AnfAlgo::SetNodeAttr("enable_atomic_add", MakeValue(true), atomic_add_node_);
+  SetNodeAttrSafely("enable_atomic_add", MakeValue(true), atomic_add_node_);

   // add input
   auto inputs = composite_node->cast<CNodePtr>()->inputs();
@@ -380,7 +380,7 @@ CNodePtr AtomicCleanInsertter::CreateAtomicCleanCompositeNode(const KernelGraphP
     AnfNodePtrList cast_inputs = {NewValueNode(prim::kPrimCast), value_node};
     auto cast_node_inner =
       CreateCNode(cast_inputs, new_sub_graph, {.format = format, .shape = {1}, .type = TypeIdToType(dst_type)});
-    AnfAlgo::SetNodeAttr("dst_type", MakeValue("float32"), cast_node_inner);
+    SetNodeAttrSafely("dst_type", MakeValue("float32"), cast_node_inner);
     broadcast_input_node = cast_node_inner;
   }
@@ -393,7 +393,7 @@ CNodePtr AtomicCleanInsertter::CreateAtomicCleanCompositeNode(const KernelGraphP
                                         broadcast_input_node};
   auto broadcast_to_node_inner = CreateCNode(
     atomic_clean_inputs, new_sub_graph, {.format = format, .shape = dst_shape_vec, .type = GetType(atomic_add_node_)});
-  AnfAlgo::SetNodeAttr("shape", MakeValue(dst_shape_vec), broadcast_to_node_inner);
+  SetNodeAttrSafely("shape", MakeValue(dst_shape_vec), broadcast_to_node_inner);

   // Makeup sub-graph.
   new_sub_graph->set_output(broadcast_to_node_inner);
@@ -80,11 +80,11 @@ CNodePtr StitchAtomicCleanInsertter::CreateInplaceAssignNodeAndCorrectReturn(con
   // add inplaceassign
   AnfNodePtr out_node = atomic_add_node_;  // Use result data itself, and set attr "fake_out" true.
   auto inplace_assign_node =
-    CreateCNode({NewValueNode(std::make_shared<Primitive>("InplaceAssign")), new_parameter, atomic_add_node_, out_node},
-                sub_graph, {.format = GetFormat(out_node), .shape = GetShape(out_node), .type = GetType(out_node)});
-  AnfAlgo::SetNodeAttr("fake_output", MakeValue(true), inplace_assign_node);
+    CreateCNode({NewValueNode(prim::kPrimInplaceAssign), new_parameter, atomic_add_node_, out_node}, sub_graph,
+                {.format = GetFormat(out_node), .shape = GetShape(out_node), .type = GetType(out_node)});
+  SetNodeAttrSafely("fake_output", MakeValue(true), inplace_assign_node);
   AnfAlgo::EraseNodeAttr(kAttrStitch, atomic_add_node_);
-  AnfAlgo::SetNodeAttr(kAttrStitch, MakeValue("common"), inplace_assign_node);
+  SetNodeAttrSafely(kAttrStitch, MakeValue("common"), inplace_assign_node);
   return inplace_assign_node;
 }
@@ -987,8 +987,8 @@ CNodePtr CreateCNode(const std::vector<AnfNodePtr> &inputs, const FuncGraphPtr &
   if (AnfAlgo::IsRealKernel(cnode)) {
     // if the node only has the primitive(such as getNext) or the node's input has a feature map input
     // then the node's output is a feature map output
-    AnfAlgo::SetNodeAttr(kIsFeatureMapOutput, MakeValue(kernel_info->is_feature_map()), cnode);
-    AnfAlgo::SetNodeAttr(kIsFeatureMapInputList, MakeValue(feature_map_input_indexs), cnode);
+    SetNodeAttrSafely(kIsFeatureMapOutput, MakeValue(kernel_info->is_feature_map()), cnode);
+    SetNodeAttrSafely(kIsFeatureMapInputList, MakeValue(feature_map_input_indexs), cnode);
   }

   // Setup kernel build info.
@@ -1020,7 +1020,8 @@ CNodePtr CreateCNode(const std::vector<AnfNodePtr> &inputs, const FuncGraphPtr &
   return cnode;
 }

-void MakeCNodeSafeForAttr(const AnfNodePtr &node) {
+void SetNodeAttrSafely(const std::string &key, const ValuePtr &value, const AnfNodePtr &node) {
+  // Make CNode safe to set attr firstly.
   auto cnode = node->cast<CNodePtr>();
   if (cnode == nullptr) {
     return;
@@ -1029,6 +1030,9 @@ void MakeCNodeSafeForAttr(const AnfNodePtr &node) {
   auto inputs = cnode->inputs();
   new_inputs.insert(new_inputs.end(), inputs.begin() + 1, inputs.end());
   cnode->set_inputs(new_inputs);
+
+  // Set attr secondly.
+  AnfAlgo::SetNodeAttr(key, value, node);
 }
 }  // namespace opt
 }  // namespace mindspore
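Note on the new helper (a minimal illustrative sketch, not part of the patch): SetNodeAttrSafely folds the old two-step call sequence into one call. It first rebuilds the CNode's inputs (the construction of new_inputs sits outside this hunk and presumably wraps a fresh copy of the node's primitive) and only then forwards to AnfAlgo::SetNodeAttr. At a call site the change looks roughly like this, for any CNodePtr named node:

    // before: two calls, and skipping the first one can write the attr onto a
    // Primitive instance that is shared with other nodes
    //   MakeCNodeSafeForAttr(node);
    //   AnfAlgo::SetNodeAttr("enable_atomic_add", MakeValue(true), node);
    // after: one call that prepares the node and then sets the attr
    SetNodeAttrSafely("enable_atomic_add", MakeValue(true), node);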
@@ -90,7 +90,7 @@ ShapeVector GetShape(const AnfNodePtr &node);
 std::vector<int64_t> GetReduceAxis(const AnfNodePtr &node);
 CNodePtr CreateCNode(const std::vector<AnfNodePtr> &inputs, const FuncGraphPtr &func_graph, const DataInfo &out_info);
-void MakeCNodeSafeForAttr(const AnfNodePtr &node);
+void SetNodeAttrSafely(const std::string &key, const ValuePtr &value, const AnfNodePtr &node);

 template <typename T>
 ValueNodePtr CreateScalarTensorValueNode(const DataInfo &info, T value, size_t data_length) {
@@ -191,10 +191,9 @@ bool ReplaceAssignByInplaceAssignInGraphkernel(const FuncGraphPtr &func_graph) {
     if (!AnfAlgo::CheckPrimitiveType(n, prim::kPrimAssign)) continue;
     changed = true;
     auto cnode = n->cast<CNodePtr>();
-    AnfNodePtrList inputs = {NewValueNode(prim::kPrimInplaceAssign->Clone()), cnode->input(1), cnode->input(2),
-                             cnode->input(2)};
+    AnfNodePtrList inputs = {NewValueNode(prim::kPrimInplaceAssign), cnode->input(1), cnode->input(2), cnode->input(2)};
     auto new_cnode = func_graph->NewCNode(inputs);
-    AnfAlgo::SetNodeAttr("fake_output", MakeValue(true), new_cnode);
+    SetNodeAttrSafely("fake_output", MakeValue(true), new_cnode);
     new_cnode->set_abstract(inputs.back()->abstract());
     new_cnode->set_kernel_info(std::make_shared<device::KernelInfo>());
     std::vector<std::string> input_formats = AnfAlgo::GetAllInputFormats(cnode);
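This hunk also drops the explicit prim::kPrimInplaceAssign->Clone() when building the input list (the Cast pass further down does the same for kPrimCast). A hedged sketch of the reasoning, assuming the per-node copy made inside SetNodeAttrSafely is what now provides the isolation:

    // kPrimInplaceAssign is a shared Primitive instance; writing "fake_output"
    // on it directly would leak the attr to every node built from it.
    AnfNodePtrList inputs = {NewValueNode(prim::kPrimInplaceAssign), cnode->input(1), cnode->input(2), cnode->input(2)};
    auto new_cnode = func_graph->NewCNode(inputs);
    // SetNodeAttrSafely re-creates the node's input(0) before AnfAlgo::SetNodeAttr runs,
    // so the shared primitive itself is never mutated.
    SetNodeAttrSafely("fake_output", MakeValue(true), new_cnode);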
@@ -770,20 +770,17 @@ void ParallelOpFusion::SetFusedParallelOpAttrToReturnNode(const ParallelInfo &pa
     const auto &fuse_nodes = parallel_info.nodes();
     std::vector<size_t> info = {i, std::dynamic_pointer_cast<CommonDimInfo>(parallel_info.dims()[i])->dim_info()};
     if (!AnfAlgo::IsGraphKernel(fuse_nodes[i])) {
-      MakeCNodeSafeForAttr(fuse_nodes[i]);
-      AnfAlgo::SetNodeAttr(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), fuse_nodes[i]);
+      SetNodeAttrSafely(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), fuse_nodes[i]);
     } else {
       auto node_g = GetValueNode<FuncGraphPtr>((fuse_nodes[i]->cast<CNodePtr>())->input(0));
       auto out_node = node_g->output();
       if (IsPrimitiveCNode(out_node, prim::kPrimMakeTuple)) {
         auto inputs = out_node->cast<CNodePtr>()->inputs();
         for (size_t j = 1; j < inputs.size(); ++j) {
-          MakeCNodeSafeForAttr(inputs[j]);
-          AnfAlgo::SetNodeAttr(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), inputs[j]);
+          SetNodeAttrSafely(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), inputs[j]);
         }
       } else {
-        MakeCNodeSafeForAttr(out_node);
-        AnfAlgo::SetNodeAttr(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), out_node);
+        SetNodeAttrSafely(kAttrParallelDimInfo, MakeValue<std::vector<size_t>>(info), out_node);
       }
     }
   }
@@ -39,9 +39,9 @@ bool RaiseReductionPrecision::IsFp16ReduceSum(const AnfNodePtr &node) {
 AnfNodePtr RaiseReductionPrecision::CreateCast(const AnfNodePtr &input, const TypePtr &dst_type, std::string format) {
   auto func_graph = input->func_graph();
   MS_EXCEPTION_IF_NULL(func_graph);
-  AnfNodePtrList inputs = {NewValueNode(prim::kPrimCast->Clone()), input};
+  AnfNodePtrList inputs = {NewValueNode(prim::kPrimCast), input};
   auto cnode = CreateCNode(inputs, func_graph, {.format = format, .shape = GetShape(input), .type = dst_type});
-  AnfAlgo::SetNodeAttr("dst_type", MakeValue(kernel::TypeId2String(dst_type->type_id())), cnode);
+  SetNodeAttrSafely("dst_type", MakeValue(kernel::TypeId2String(dst_type->type_id())), cnode);
   return cnode;
 }
@@ -77,7 +77,7 @@ const AnfNodePtr SubstituteDropout::Process(const FuncGraphPtr &func_graph, cons
   ShapeVector shape_i64;
   std::transform(shape.begin(), shape.end(), std::back_inserter(shape_i64), [](size_t x) { return SizeToLong(x); });

-  // The primitive should use a clone, otherwise the attr seed will be overrided.
+  // The primitive should use a clone, otherwise the attr seed will be overridden.
   AnfNodePtrList uniform_input = {NewValueNode(prim::kPrimCudnnUniformReal->Clone())};
   auto tensor = std::make_shared<tensor::Tensor>(kNumberTypeInt64, ShapeVector(1, SizeToLong(shape.size())),
                                                  static_cast<void *>(&shape[0]), kNumberTypeInt64);
@@ -98,8 +98,8 @@ const AnfNodePtr SubstituteDropout::Process(const FuncGraphPtr &func_graph, cons
   // create new uniform_real_node
   auto uniform_real_node = func_graph->NewCNode(uniform_input);
-  AnfAlgo::GetCNodePrimitive(uniform_real_node)->set_attr("seed", MakeValue(SizeToLong(seed_++)));
-  AnfAlgo::GetCNodePrimitive(uniform_real_node)->set_attr("seed2", MakeValue(SizeToLong(seed_++)));
+  SetNodeAttrSafely("seed", MakeValue(SizeToLong(seed_++)), uniform_real_node);
+  SetNodeAttrSafely("seed2", MakeValue(SizeToLong(seed_++)), uniform_real_node);
   auto uniform_abstract = std::make_shared<abstract::AbstractTensor>(std::make_shared<Float>(32), shape_i64);
   uniform_real_node->set_abstract(uniform_abstract);
   uniform_real_node->set_kernel_info(std::make_shared<device::KernelInfo>());
@@ -110,7 +110,7 @@ const AnfNodePtr SubstituteDropout::Process(const FuncGraphPtr &func_graph, cons
   new_node_inputs.push_back(cnode->input(1));
   new_node_inputs.push_back(uniform_real_node);
   auto new_node = func_graph->NewCNode(new_node_inputs);
-  AnfAlgo::GetCNodePrimitive(new_node)->set_attr("keep_prob", AnfAlgo::GetCNodePrimitive(cnode)->GetAttr("keep_prob"));
+  SetNodeAttrSafely("keep_prob", MakeValue(AnfAlgo::GetNodeAttr<float>(cnode, "keep_prob")), new_node);
   new_node->set_abstract(old_abstract);
   new_node->set_kernel_info(std::make_shared<device::KernelInfo>());
   SetNewKernelInfo(new_node);
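In the dropout substitution, every attribute write now goes through the same helper: the generated uniform node gets fresh seed/seed2 values, and keep_prob is read back from the original Dropout with AnfAlgo::GetNodeAttr<float> instead of copying the attr object between primitives. A minimal sketch of the resulting pattern (illustrative; cnode, new_node and seed_ come from the pass itself):

    // old: mutate the primitives directly
    //   AnfAlgo::GetCNodePrimitive(new_node)->set_attr("keep_prob",
    //       AnfAlgo::GetCNodePrimitive(cnode)->GetAttr("keep_prob"));
    // new: read the value, then write it through the helper so the attr lands on
    // the node's own primitive copy rather than on a shared instance
    SetNodeAttrSafely("keep_prob", MakeValue(AnfAlgo::GetNodeAttr<float>(cnode, "keep_prob")), new_node);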