Browse Source

!2505 [Code Review] fix code review content

Merge pull request !2505 from jjfeing/master
tags/v0.6.0-beta
mindspore-ci-bot Gitee 5 years ago
parent
commit
ce9c68d8da
9 changed files with 40 additions and 38 deletions
  1. +2
    -0
      mindspore/ccsrc/kernel/kash/kernel_pack.cc
  2. +2
    -0
      mindspore/ccsrc/kernel/oplib/oplib.cc
  3. +14
    -14
      mindspore/ccsrc/kernel/tbe/tbe_kernel_build.cc
  4. +1
    -1
      mindspore/ccsrc/kernel/tbe/tbe_kernel_parallel_build.cc
  5. +1
    -1
      mindspore/ccsrc/kernel/tbe/tbe_kernel_parallel_build.h
  6. +2
    -3
      mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_broadcast_selecter.cc
  7. +2
    -3
      mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_reduce_selecter.cc
  8. +2
    -3
      mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_select.cc
  9. +14
    -13
      mindspore/ccsrc/kernel/tbe/tbe_utils.cc

+ 2
- 0
mindspore/ccsrc/kernel/kash/kernel_pack.cc View File

@@ -50,6 +50,8 @@ bool CheckHash(const std::string &json_file, const std::string &bin_file, const
} // namespace } // namespace


const std::string KernelPack::Serialize() const { const std::string KernelPack::Serialize() const {
MS_EXCEPTION_IF_NULL(json_);
MS_EXCEPTION_IF_NULL(kernel_);
std::string buffer; std::string buffer;
(void)buffer.append((const char *)json_, json_->len + sizeof(json_->len)); (void)buffer.append((const char *)json_, json_->len + sizeof(json_->len));
(void)buffer.append((const char *)kernel_, kernel_->len + sizeof(kernel_->len)); (void)buffer.append((const char *)kernel_, kernel_->len + sizeof(kernel_->len));


+ 2
- 0
mindspore/ccsrc/kernel/oplib/oplib.cc View File

@@ -293,8 +293,10 @@ bool OpLib::GetRefInfo(const std::shared_ptr<OpInfo> &op_info) {
const auto &output_infos = op_info->outputs_ptr(); const auto &output_infos = op_info->outputs_ptr();
const auto &input_infos = op_info->inputs_ptr(); const auto &input_infos = op_info->inputs_ptr();
for (size_t out_index = 0; out_index < output_infos.size(); out_index++) { for (size_t out_index = 0; out_index < output_infos.size(); out_index++) {
MS_EXCEPTION_IF_NULL(output_infos[out_index]);
const auto &out_name = output_infos[out_index]->name(); const auto &out_name = output_infos[out_index]->name();
for (size_t in_index = 0; in_index < input_infos.size(); in_index++) { for (size_t in_index = 0; in_index < input_infos.size(); in_index++) {
MS_EXCEPTION_IF_NULL(input_infos[in_index]);
const auto &in_name = input_infos[in_index]->name(); const auto &in_name = input_infos[in_index]->name();
if (out_name == in_name) { if (out_name == in_name) {
if (op_info->has_ref_index(out_index)) { if (op_info->has_ref_index(out_index)) {


+ 14
- 14
mindspore/ccsrc/kernel/tbe/tbe_kernel_build.cc View File

@@ -189,7 +189,7 @@ bool TbeKernelJsonCreator::GenInputList(const std::shared_ptr<AnfNode> &anf_node
input_list->emplace_back(input_desc_json); input_list->emplace_back(input_desc_json);
continue; continue;
} }
MS_LOG(ERROR) << "input num: " << *real_input_index << " is not match op inputs";
MS_LOG(ERROR) << "Input num: " << *real_input_index << " is not match op inputs";
return false; return false;
} }
if (op_name == "BatchNorm") { if (op_name == "BatchNorm") {
@@ -197,7 +197,7 @@ bool TbeKernelJsonCreator::GenInputList(const std::shared_ptr<AnfNode> &anf_node
auto attr = primitive->GetAttr("is_training"); auto attr = primitive->GetAttr("is_training");
MS_EXCEPTION_IF_NULL(attr); MS_EXCEPTION_IF_NULL(attr);
bool is_training = GetValue<bool>(attr); bool is_training = GetValue<bool>(attr);
MS_LOG(INFO) << "op_name" << op_name << ", tensor_name " << input_ptr->name() << ", is_training "
MS_LOG(INFO) << "Op_name" << op_name << ", tensor_name " << input_ptr->name() << ", is_training "
<< is_training; << is_training;
if (is_training) { if (is_training) {
(*real_input_index)++; (*real_input_index)++;
@@ -230,7 +230,7 @@ bool GetInputNameAndRealNum(const std::shared_ptr<AnfNode> &anf_node, const std:


if (input_ptr->param_type() == kParamDynamic) { if (input_ptr->param_type() == kParamDynamic) {
if (*dyn_input_index >= dyn_input_sizes.size()) { if (*dyn_input_index >= dyn_input_sizes.size()) {
MS_LOG(ERROR) << "dyn input index" << *dyn_input_index << "is over dyn input num" << dyn_input_sizes.size();
MS_LOG(ERROR) << "Dyn input index" << *dyn_input_index << "is over dyn input num" << dyn_input_sizes.size();
return false; return false;
} }
*input_num = IntToSize(dyn_input_sizes[*dyn_input_index]); *input_num = IntToSize(dyn_input_sizes[*dyn_input_index]);
@@ -314,7 +314,7 @@ bool TbeKernelJsonCreator::GenOutputDescJson(
output_obj_num = real_output_num; output_obj_num = real_output_num;
} else { } else {
if (output_idx >= real_output_num) { if (output_idx >= real_output_num) {
MS_LOG(INFO) << "op:" << op_name << ", output" << output_ptr->name() << " is optional, output is none.";
MS_LOG(INFO) << "Op:" << op_name << ", output" << output_ptr->name() << " is optional, output is none.";
std::vector<nlohmann::json> output_list; std::vector<nlohmann::json> output_list;
nlohmann::json output_obj; nlohmann::json output_obj;
output_obj[kJName] = output_ptr->name(); output_obj[kJName] = output_ptr->name();
@@ -389,7 +389,7 @@ bool TbeKernelJsonCreator::GenTbeAttrJson(const std::shared_ptr<AnfNode> &anf_no
attr_obj[kJValid] = false; attr_obj[kJValid] = false;
} else { } else {
if (attr_ptr->param_type() == kParamRequred && creater_type_ == SINGLE_BUILD) { if (attr_ptr->param_type() == kParamRequred && creater_type_ == SINGLE_BUILD) {
MS_LOG(EXCEPTION) << "op name: " << op_info->op_name() << " attr: " << attr_name
MS_LOG(EXCEPTION) << "Op name: " << op_info->op_name() << " attr: " << attr_name
<< " is required, but not set."; << " is required, but not set.";
} else { } else {
attr_obj[kJValid] = false; attr_obj[kJValid] = false;
@@ -451,7 +451,7 @@ void TbeKernelJsonCreator::ParseAttrValue(const std::string &type, const mindspo
auto attr_value = GetValue<std::vector<std::vector<int>>>(value); auto attr_value = GetValue<std::vector<std::vector<int>>>(value);
(*attr_obj)[kJValue] = attr_value; (*attr_obj)[kJValue] = attr_value;
} else { } else {
MS_LOG(EXCEPTION) << "type: " << type << "not support";
MS_LOG(EXCEPTION) << "Type: " << type << "not support";
} }
} }


@@ -536,7 +536,7 @@ std::string TbeKernelJsonCreator::GetDeviceOutputFormat(const AnfNodePtr &anf_no
bool TbeKernelBuild::GetIOSize(const nlohmann::json &kernel_json, std::vector<size_t> *input_size_list, bool TbeKernelBuild::GetIOSize(const nlohmann::json &kernel_json, std::vector<size_t> *input_size_list,
std::vector<size_t> *output_size_list) { std::vector<size_t> *output_size_list) {
if (input_size_list == nullptr || output_size_list == nullptr) { if (input_size_list == nullptr || output_size_list == nullptr) {
MS_LOG(ERROR) << "input size or output size is nullptr";
MS_LOG(ERROR) << "Input size or output size is nullptr";
return false; return false;
} }
input_size_list->clear(); input_size_list->clear();
@@ -750,7 +750,7 @@ bool TbeKernelBuild::GenFusionDataInputJson(const std::shared_ptr<mindspore::Anf
MS_EXCEPTION_IF_NULL(index); MS_EXCEPTION_IF_NULL(index);
std::vector<nlohmann::json> output_desc_list; std::vector<nlohmann::json> output_desc_list;
if (!data_input) { if (!data_input) {
MS_LOG(INFO) << "data input is optional node";
MS_LOG(INFO) << "Data input is optional node";
auto name = std::string(kOptional) + std::to_string(*index); auto name = std::string(kOptional) + std::to_string(*index);
(*data_str)[kJName] = name; (*data_str)[kJName] = name;
nlohmann::json output_desc; nlohmann::json output_desc;
@@ -766,7 +766,7 @@ bool TbeKernelBuild::GenFusionDataInputJson(const std::shared_ptr<mindspore::Anf
auto kernel_idx = AnfAlgo::VisitKernel(data_input, 0); auto kernel_idx = AnfAlgo::VisitKernel(data_input, 0);
auto real_node = kernel_idx.first; auto real_node = kernel_idx.first;
size_t real_idx = kernel_idx.second; size_t real_idx = kernel_idx.second;
MS_LOG(INFO) << "real name " << real_node->fullname_with_scope() << " index:" << real_idx;
MS_LOG(INFO) << "Real name " << real_node->fullname_with_scope() << " index:" << real_idx;
// kJOutputDesc // kJOutputDesc
nlohmann::json output_desc; nlohmann::json output_desc;
GenDescJson(real_node, real_idx, real_idx, &output_desc, fusion_data_type); GenDescJson(real_node, real_idx, real_idx, &output_desc, fusion_data_type);
@@ -842,18 +842,18 @@ bool TbeKernelBuild::GenFusionComputeInputJson(const mindspore::CNodePtr &cnode,
auto kernel_idx = AnfAlgo::VisitKernel(input, 0); auto kernel_idx = AnfAlgo::VisitKernel(input, 0);
auto real_node = kernel_idx.first; auto real_node = kernel_idx.first;
size_t real_idx = kernel_idx.second; size_t real_idx = kernel_idx.second;
MS_LOG(INFO) << "real name" << real_node->fullname_with_scope() << "index:" << real_idx;
MS_LOG(INFO) << "Real name" << real_node->fullname_with_scope() << "index:" << real_idx;
nlohmann::json input_desc; nlohmann::json input_desc;
GenDescJson(real_node, real_idx, real_idx, &input_desc); GenDescJson(real_node, real_idx, real_idx, &input_desc);
if (is_dynamic_input) { if (is_dynamic_input) {
MS_LOG(INFO) << "node has dynamic input.";
MS_LOG(INFO) << "Node has dynamic input.";
input_desc[kJDynIndex] = (i - 1); input_desc[kJDynIndex] = (i - 1);
} }
input_desc_list_tmp.emplace_back(input_desc); input_desc_list_tmp.emplace_back(input_desc);
} }
size_t optional_num = GetOptionalInput(cnode, is_dynamic_input); size_t optional_num = GetOptionalInput(cnode, is_dynamic_input);
if (optional_num > 0) { if (optional_num > 0) {
MS_LOG(INFO) << "node has optional input.";
MS_LOG(INFO) << "Node has optional input.";
for (size_t i = 0; i < optional_num; ++i) { for (size_t i = 0; i < optional_num; ++i) {
nlohmann::json optional_input_desc; nlohmann::json optional_input_desc;
optional_input_desc[kJName] = std::string(kOptional) + std::to_string(*index); optional_input_desc[kJName] = std::string(kOptional) + std::to_string(*index);
@@ -871,7 +871,7 @@ std::vector<size_t> TbeKernelBuild::GetDescOutputIndex(const std::vector<int> &o
std::vector<size_t> desc_output_index = {}; std::vector<size_t> desc_output_index = {};
for (size_t idx = 0; idx < output_used_nums.size(); ++idx) { for (size_t idx = 0; idx < output_used_nums.size(); ++idx) {
auto output_use_num_item = output_used_nums[idx]; auto output_use_num_item = output_used_nums[idx];
MS_LOG(INFO) << "output used num[" << idx << "] = " << output_use_num_item;
MS_LOG(INFO) << "Output used num[" << idx << "] = " << output_use_num_item;
desc_output_index.emplace_back(idx); desc_output_index.emplace_back(idx);
if (output_use_num_item > 1) { if (output_use_num_item > 1) {
desc_output_index.emplace_back(idx); desc_output_index.emplace_back(idx);
@@ -990,7 +990,7 @@ bool TbeKernelBuild::GetIOSize(const nlohmann::json &fusion_op_list,
auto op_output_desces = op[kJOutputDesc]; auto op_output_desces = op[kJOutputDesc];
if (output_node != real_node) { if (output_node != real_node) {
// tuple_get item // tuple_get item
MS_LOG(INFO) << "output is a tuple getitem node";
MS_LOG(INFO) << "Output is a tuple getitem node";
auto output_desc = op_output_desces[real_idx]; auto output_desc = op_output_desces[real_idx];
if (output_desc[kJShape].empty()) { if (output_desc[kJShape].empty()) {
MS_LOG(INFO) << "Fusion error: output_desc's shape is empty. real_index " << real_idx; MS_LOG(INFO) << "Fusion error: output_desc's shape is empty. real_index " << real_idx;


+ 1
- 1
mindspore/ccsrc/kernel/tbe/tbe_kernel_parallel_build.cc View File

@@ -77,7 +77,7 @@ bool TbeOpParallelPreBuild(const std::vector<AnfNodePtr> &anf_nodes) {
return true; return true;
} }


bool TbeOpParallelBuild(std::vector<AnfNodePtr> anf_nodes) {
bool TbeOpParallelBuild(const std::vector<AnfNodePtr> &anf_nodes) {
auto build_manger = std::make_shared<ParallelBuildManager>(); auto build_manger = std::make_shared<ParallelBuildManager>();
MS_EXCEPTION_IF_NULL(build_manger); MS_EXCEPTION_IF_NULL(build_manger);
set<std::string> processed_kernel; set<std::string> processed_kernel;


+ 1
- 1
mindspore/ccsrc/kernel/tbe/tbe_kernel_parallel_build.h View File

@@ -27,7 +27,7 @@
namespace mindspore { namespace mindspore {
namespace kernel { namespace kernel {
bool TbeOpParallelPreBuild(const std::vector<AnfNodePtr> &anf_nodes); bool TbeOpParallelPreBuild(const std::vector<AnfNodePtr> &anf_nodes);
bool TbeOpParallelBuild(std::vector<AnfNodePtr> anf_nodes);
bool TbeOpParallelBuild(const std::vector<AnfNodePtr> &anf_nodes);


struct KernelBuildTaskInfo { struct KernelBuildTaskInfo {
AnfNode *node; AnfNode *node;


+ 2
- 3
mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_broadcast_selecter.cc View File

@@ -20,7 +20,6 @@


namespace mindspore { namespace mindspore {
namespace kernel { namespace kernel {
constexpr char kDynInputKey[] = "dyn_input_sizes";
constexpr size_t kInputIndex_0 = 0; constexpr size_t kInputIndex_0 = 0;
constexpr size_t kChannelN = 0; constexpr size_t kChannelN = 0;
constexpr size_t kChannelC = 1; constexpr size_t kChannelC = 1;
@@ -34,9 +33,9 @@ bool TbeKernelBroadCastSelecter::GetShapeInfo(SupportFormat *support_format) {
output_num_ = 0; output_num_ = 0;
input_shapes_.clear(); input_shapes_.clear();
output_shapes_.clear(); output_shapes_.clear();
if (AnfAlgo::HasNodeAttr(kDynInputKey, cnode_ptr_)) {
if (AnfAlgo::HasNodeAttr(kAttrDynInputSizes, cnode_ptr_)) {
MS_LOG(INFO) << "This broadcast node has dynamic input."; MS_LOG(INFO) << "This broadcast node has dynamic input.";
auto dynamic_size_vec = AnfAlgo::GetNodeAttr<std::vector<int>>(cnode_ptr_, kDynInputKey);
auto dynamic_size_vec = AnfAlgo::GetNodeAttr<std::vector<int>>(cnode_ptr_, kAttrDynInputSizes);
if (dynamic_size_vec.empty() || dynamic_size_vec[0] < 2) { if (dynamic_size_vec.empty() || dynamic_size_vec[0] < 2) {
MS_LOG(EXCEPTION) << "dynamic attr set error, please check."; MS_LOG(EXCEPTION) << "dynamic attr set error, please check.";
} }


+ 2
- 3
mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_reduce_selecter.cc View File

@@ -23,7 +23,6 @@


namespace mindspore { namespace mindspore {
namespace kernel { namespace kernel {
constexpr char kKeepDims[] = "keep_dims";
constexpr char kAxis[] = "axis"; constexpr char kAxis[] = "axis";
constexpr char kTypeInt32[] = "Int32"; constexpr char kTypeInt32[] = "Int32";
constexpr size_t kInputIndex_0 = 0; constexpr size_t kInputIndex_0 = 0;
@@ -148,12 +147,12 @@ void TbeKernelReduceSelecter::GetReduceAttrAxis() {
} }


void TbeKernelReduceSelecter::GetReduceAttrKeepDim() { void TbeKernelReduceSelecter::GetReduceAttrKeepDim() {
if (!AnfAlgo::HasNodeAttr(kKeepDims, cnode_ptr_)) {
if (!AnfAlgo::HasNodeAttr(kAttrKeepDims, cnode_ptr_)) {
MS_LOG(INFO) << "This node does't have keep_attr."; MS_LOG(INFO) << "This node does't have keep_attr.";
keep_dims_ = false; keep_dims_ = false;
return; return;
} }
keep_dims_ = AnfAlgo::GetNodeAttr<bool>(cnode_ptr_, kKeepDims);
keep_dims_ = AnfAlgo::GetNodeAttr<bool>(cnode_ptr_, kAttrKeepDims);
} }


void TbeKernelReduceSelecter::AssignSupportFormat(const std::string &support_format_str, void TbeKernelReduceSelecter::AssignSupportFormat(const std::string &support_format_str,


+ 2
- 3
mindspore/ccsrc/kernel/tbe/tbe_kernel_select/tbe_kernel_select.cc View File

@@ -39,7 +39,6 @@ constexpr auto kDtype = "dtype";
constexpr auto kFormat = "format"; constexpr auto kFormat = "format";
constexpr auto kPrefixInput = "input"; constexpr auto kPrefixInput = "input";
constexpr auto kPrefixOutput = "output"; constexpr auto kPrefixOutput = "output";
constexpr char kDynInputKey[] = "dyn_input_sizes";
constexpr char kParamTypeDynamic[] = "dynamic"; constexpr char kParamTypeDynamic[] = "dynamic";
constexpr char kParamTypeRequre[] = "required"; constexpr char kParamTypeRequre[] = "required";
constexpr char kParamTypeOptional[] = "optional"; constexpr char kParamTypeOptional[] = "optional";
@@ -87,8 +86,8 @@ void TbeKernelSelect::GetCommonPatternKernelInfo(const OpInfo &op_info) {
auto primitive = AnfAlgo::GetCNodePrimitive(cnode_ptr_); auto primitive = AnfAlgo::GetCNodePrimitive(cnode_ptr_);
MS_EXCEPTION_IF_NULL(primitive); MS_EXCEPTION_IF_NULL(primitive);
std::vector<int> dyn_input_sizes; std::vector<int> dyn_input_sizes;
if (primitive->HasAttr(kDynInputKey)) {
dyn_input_sizes = GetValue<std::vector<int>>(primitive->GetAttr(kDynInputKey));
if (primitive->HasAttr(kAttrDynInputSizes)) {
dyn_input_sizes = GetValue<std::vector<int>>(primitive->GetAttr(kAttrDynInputSizes));
} }
// get real input/output num // get real input/output num
size_t real_input_tensor_num = AnfAlgo::GetInputTensorNum(cnode_ptr_); size_t real_input_tensor_num = AnfAlgo::GetInputTensorNum(cnode_ptr_);


+ 14
- 13
mindspore/ccsrc/kernel/tbe/tbe_utils.cc View File

@@ -59,14 +59,14 @@ void TbeUtils::SaveJsonInfo(const std::string &json_name, const std::string &inf
MS_LOG(INFO) << "json file exist, no need to create."; MS_LOG(INFO) << "json file exist, no need to create.";
return; return;
} }
std::ofstream filewrite;
filewrite.open(path);
if (!filewrite.is_open()) {
std::ofstream file_write;
file_write.open(path);
if (!file_write.is_open()) {
return; return;
} }
filewrite << info << std::endl;
filewrite.close();
if (nullptr == realpath(path.c_str(), real_path)) {
file_write << info << std::endl;
file_write.close();
if (realpath(path.c_str(), real_path) == nullptr) {
MS_LOG(INFO) << "dir: " << path << "does not exit."; MS_LOG(INFO) << "dir: " << path << "does not exit.";
return; return;
} }
@@ -144,12 +144,12 @@ uintptr_t KernelManager::GenFuncStub(const mindspore::kernel::KernelPack &kernel
auto kernel_json_info = kernel_pack.kernel_json_info(); auto kernel_json_info = kernel_pack.kernel_json_info();


*block_dim = kernel_json_info.block_dim; *block_dim = kernel_json_info.block_dim;
string funcname = kernel_json_info.kernel_name;
string func_name = kernel_json_info.kernel_name;
string magic = kernel_json_info.magic; string magic = kernel_json_info.magic;


if (!force_reload) { if (!force_reload) {
// use the cached object. // use the cached object.
auto iter = info_table_.find(funcname);
auto iter = info_table_.find(func_name);
if (iter != info_table_.end()) { if (iter != info_table_.end()) {
auto kernelmeta = iter->second; auto kernelmeta = iter->second;
*block_dim = kernelmeta->block_dim_; *block_dim = kernelmeta->block_dim_;
@@ -157,23 +157,24 @@ uintptr_t KernelManager::GenFuncStub(const mindspore::kernel::KernelPack &kernel
} }
} }
void *module = nullptr; void *module = nullptr;
if (0 != BinaryRegister((*kernel_pack.GetKernel()), &module, magic)) {
if (BinaryRegister((*kernel_pack.GetKernel()), &module, magic) != 0) {
MS_LOG(INFO) << "Call runtime BinaryRegister error."; MS_LOG(INFO) << "Call runtime BinaryRegister error.";
return 0; return 0;
} }
// to diff different funcs. // to diff different funcs.
uintptr_t funcstub = ++kernel_stub_gen_;
uintptr_t func_stub = ++kernel_stub_gen_;
if (RT_ERROR_NONE != if (RT_ERROR_NONE !=
rtFunctionRegister(module, reinterpret_cast<void *>(funcstub), funcname.c_str(), funcname.c_str(), 0)) {
rtFunctionRegister(module, reinterpret_cast<void *>(func_stub), func_name.c_str(), func_name.c_str(), 0)) {
MS_LOG(INFO) << "Call runtime rtFunctionRegister error."; MS_LOG(INFO) << "Call runtime rtFunctionRegister error.";
return 0; return 0;
} }
// cache the registered kernelmeta. // cache the registered kernelmeta.
info_table_[funcname] = std::make_shared<KernelMetaInfo>(KernelMetaInfo{funcstub, *block_dim});
return funcstub;
info_table_[func_name] = std::make_shared<KernelMetaInfo>(KernelMetaInfo{func_stub, *block_dim});
return func_stub;
} }


std::string KernelManager::GetStubFuncName(const KernelPackPtr &kernel_pack) { std::string KernelManager::GetStubFuncName(const KernelPackPtr &kernel_pack) {
MS_EXCEPTION_IF_NULL(kernel_pack);
auto kernel_json_info = kernel_pack->kernel_json_info(); auto kernel_json_info = kernel_pack->kernel_json_info();
return kernel_json_info.kernel_name; return kernel_json_info.kernel_name;
} }


Loading…
Cancel
Save