Browse Source

Report errno details in error messages: append strerror(errno) to file/dup failure reports, migrate ErrorMessage::k* stage constants to the error_message namespace, add missing REPORT_*_ERROR calls alongside GELOGE, and propagate the error-manager context into the threaded TensorFlow ParseNodeDef tasks.

pull/295/head
wangxiaotian22 4 years ago
parent
commit
7cc8d10a3c
8 changed files with 133 additions and 49 deletions
  1. +13
    -13
      parser/caffe/caffe_parser.cc
  2. +6
    -5
      parser/common/model_saver.cc
  3. +7
    -6
      parser/common/util.h
  4. +2
    -2
      parser/onnx/onnx_parser.cc
  5. +1
    -1
      parser/tensorflow/graph_functiondef.cc
  6. +1
    -1
      parser/tensorflow/graph_optimizer.cc
  7. +101
    -20
      parser/tensorflow/tensorflow_parser.cc
  8. +2
    -1
      parser/tensorflow/tensorflow_parser.h

+ 13
- 13
parser/caffe/caffe_parser.cc View File

@@ -74,7 +74,7 @@ using std::ifstream;

namespace ge {
graphStatus aclgrphParseCaffe(const char *model_file, const char *weights_file, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_file);
GetParserContext().type = domi::CAFFE;
std::map<string, string> options;
@@ -128,7 +128,7 @@ graphStatus aclgrphParseCaffe(const char *model_file, const char *weights_file,

graphStatus aclgrphParseCaffe(const char *model_file, const char *weights_file,
const std::map<AscendString, AscendString> &parser_params, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_file);
GetParserContext().type = domi::CAFFE;
std::map<string, string> options;
@@ -558,8 +558,8 @@ Status CaffeModelParser::ParseProtoFile(const string &proto_file, std::map<int32
Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google::protobuf::Message *message) {
int32_t copy_fd = mmDup(STDERR_FILENO);
if (copy_fd < 0) {
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file"}, {"STDERR_FILENO"});
GELOGE(FAILED, "[Invoke][Dup] failed: %d.", copy_fd);
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file", "errmsg"}, {"STDERR_FILENO", strerror(errno)});
GELOGE(FAILED, "[Invoke][Dup] failed:%d, reason:%s", copy_fd, strerror(errno));
return FAILED;
}

@@ -567,15 +567,15 @@ Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google:
if (fd < 0) {
(void)mmClose(copy_fd);
ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {kDevNull, strerror(errno)});
GELOGE(FAILED, "[Open][File] %s failed.", kDevNull);
GELOGE(FAILED, "[Open][File] %s failed. reason:%s", kDevNull, strerror(errno));
return FAILED;
}

if (mmDup2(fd, STDERR_FILENO) < 0) {
(void)mmClose(fd);
(void)mmClose(copy_fd);
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file"}, {"STDERR_FILENO"});
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed.");
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file", "errmsg"}, {"STDERR_FILENO", strerror(errno)});
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
return FAILED;
}

@@ -589,8 +589,8 @@ Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google:
if (mmDup2(copy_fd, STDERR_FILENO) < 0) {
(void)mmClose(fd);
(void)mmClose(copy_fd);
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file"}, {"STDERR_FILENO"});
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed.");
ErrorManager::GetInstance().ATCReportErrMessage("E19020", {"file", "errmsg"}, {"STDERR_FILENO", strerror(errno)});
GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
return FAILED;
}
(void)mmClose(fd);
@@ -1483,7 +1483,7 @@ Status CaffeModelParser::PreCheck(const domi::caffe::NetParameter &net) {
}

Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::ComputeGraphPtr &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
bool has_error = false;

GE_CHK_BOOL_RET_STATUS(data != nullptr, FAILED, "[Check][Param]model data is nullptr.");
@@ -1614,7 +1614,7 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
}

Status CaffeModelParser::Parse(const char *model_path, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_path);
ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
GE_CHECK_NOTNULL(compute_graph);
@@ -1904,7 +1904,7 @@ Status CaffeModelParser::ReorderInput(domi::caffe::NetParameter &net) {
}

Status CaffeWeightsParser::ParseFromMemory(const char *data, uint32_t size, ge::ComputeGraphPtr &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
if (data == nullptr) {
REPORT_INNER_ERROR("E19999", "param data is nullptr.");
GELOGE(PARAM_INVALID, "[Check][Param]Caffe weights data is nullptr");
@@ -1935,7 +1935,7 @@ Status CaffeWeightsParser::ParseFromMemory(const char *data, uint32_t size, ge::
}

Status CaffeWeightsParser::Parse(const char *file, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(file);
ge::ComputeGraphPtr compute_graph = ge::GraphUtils::GetComputeGraph(graph);
GE_CHECK_NOTNULL(compute_graph);


+ 6
- 5
parser/common/model_saver.cc View File

@@ -81,8 +81,8 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFi
}
// Close file
if (mmClose(fd) != EN_OK) {
REPORT_INNER_ERROR("E19999", "close file:%s failed", file_path);
GELOGE(FAILED, "[Close][File] %s failed.", file_path);
REPORT_INNER_ERROR("E19999", "close file:%s failed. errmsg:%s", file_path, strerror(errno));
GELOGE(FAILED, "[Close][File] %s failed. errmsg:%s", file_path, strerror(errno));
ret = FAILED;
}
return ret;
@@ -137,8 +137,8 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
if (ret != 0) {
if (errno != EEXIST) {
ErrorManager::GetInstance().ATCReportErrMessage("E19006", {"path"}, {directory_path});
GELOGW("Can not create directory %s. Make sure the directory exists and writable.",
directory_path.c_str());
GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
return ret;
}
}
@@ -149,7 +149,8 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
if (ret != 0) {
if (errno != EEXIST) {
ErrorManager::GetInstance().ATCReportErrMessage("E19006", {"path"}, {directory_path});
GELOGW("Can not create directory %s. Make sure the directory exists and writable.", directory_path.c_str());
GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
directory_path.c_str(), strerror(errno));
return ret;
}
}


+ 7
- 6
parser/common/util.h View File

@@ -74,12 +74,13 @@
} while (0)

// Check if the parameter is null. If yes, return PARAM_INVALID and record the error
#define GE_CHECK_NOTNULL(val) \
do { \
if (val == nullptr) { \
GELOGE(ge::FAILED, "param[%s] must not be null.", #val); \
return ge::PARAM_INVALID; \
} \
#define GE_CHECK_NOTNULL(val) \
do { \
if (val == nullptr) { \
GELOGE(ge::FAILED, "param[%s] must not be null.", #val); \
REPORT_INNER_ERROR("E19999", "Param:%s is nullptr, check invalid", #val); \
return ge::PARAM_INVALID; \
} \
} while (0)

// Check whether the parameter is null. If so, execute the exec_expr expression and record the error log


+ 2
- 2
parser/onnx/onnx_parser.cc View File

@@ -936,7 +936,7 @@ Status OnnxModelParser::ModelParseToGraphImpl(bool is_subgraph, ge::onnx::GraphP
}

Status OnnxModelParser::Parse(const char *file, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
ge::onnx::ModelProto onnx_model;
Status ret = GetModelFromFile(file, onnx_model);
if (ret != SUCCESS) {
@@ -952,7 +952,7 @@ Status OnnxModelParser::Parse(const char *file, ge::Graph &graph) {
}

Status OnnxModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
ge::onnx::ModelProto onnx_model;
Status ret = GetModelFromMemory(data, size, onnx_model);
if (ret != SUCCESS) {


+ 1
- 1
parser/tensorflow/graph_functiondef.cc View File

@@ -468,7 +468,7 @@ domi::Status GraphToFunctionDef::DavGraphToFunctionDef(ge::ComputeGraphPtr graph
// Add regular inputs
for (auto anchor : in_anchors) {
GE_IF_BOOL_EXEC(anchor == nullptr,
REPORT_CALL_ERROR("E19999", "Nonconsecutive input edges; missing input edge for node %s",
REPORT_INNER_ERROR("E19999", "Nonconsecutive input edges; missing input edge for node %s",
node_def_.name().c_str());
GELOGE(domi::INTERNAL_ERROR, "Nonconsecutive input edges; missing input edge , for node %s .",
node_def_.name().c_str());


+ 1
- 1
parser/tensorflow/graph_optimizer.cc View File

@@ -1273,7 +1273,7 @@ Status CreateFuncDefBytes(ge::NodePtr n, string original_type, string func_bin_p
char *buf = nullptr;
int32_t len = 0;
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(!ge::parser::ReadBytesFromBinaryFile(file.c_str(), &buf, len),
REPORT_CALL_ERROR("E19999", "Read bytes for file:%s failed", file.c_str());
REPORT_CALL_ERROR("E19999", "Read bytes from file:%s failed", file.c_str());
return false,
"read bytes file error!");



+ 101
- 20
parser/tensorflow/tensorflow_parser.cc View File

@@ -88,7 +88,7 @@ using ge::parser::ModelSaver;

namespace ge {
graphStatus aclgrphParseTensorFlow(const char *model_file, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_file);
GetParserContext().type = domi::TENSORFLOW;
std::map<string, string> options;
@@ -136,7 +136,7 @@ graphStatus aclgrphParseTensorFlow(const char *model_file, ge::Graph &graph) {

graphStatus aclgrphParseTensorFlow(const char *model_file, const std::map<AscendString, AscendString> &parser_params,
ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_file);
GetParserContext().type = domi::TENSORFLOW;
std::map<string, string> options;
@@ -240,7 +240,7 @@ Status GenSubgraphParseTasks(const ge::ComputeGraphPtr &parent_graph, std::deque
}
auto ret = ge::NodeUtils::SetSubgraph(*node, i, subgraph);
if (ret != SUCCESS) {
REPORT_CALL_ERROR("E19999", "Set subgraph:%s to node:%s(%s) failed, index:%s",
REPORT_CALL_ERROR("E19999", "Set subgraph:%s to node:%s(%s) failed, index:%u",
subgraph_iname.c_str(), node->GetName().c_str(), node->GetType().c_str(), i);
GELOGE(ret, "Failed to set subgraph %s to node %s index %u", subgraph_iname.c_str(), node->GetName().c_str(),
i);
@@ -334,6 +334,8 @@ Status TensorFlowModelParser::DefunToPartitionedCall(const domi::tensorflow::Nod
for (size_t i = 0; i < input_tensor_num; ++i) {
ge::GeTensorDesc input_tensor;
if (op->AddInputDesc(input_tensor) != ge::GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "Add input desc to op:%s(%s) failed",
op->GetName().c_str(), op->GetType().c_str());
GELOGE(FAILED, "op [%s] type[%s] add input(%zu) tensor failed.", op_name.c_str(), op->GetType().c_str(), i);
return FAILED;
}
@@ -342,6 +344,8 @@ Status TensorFlowModelParser::DefunToPartitionedCall(const domi::tensorflow::Nod
for (size_t i = 0; i < output_tensor_num; ++i) {
ge::GeTensorDesc output_tensor;
if (op->AddOutputDesc(output_tensor) != ge::GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "Add output desc to op:%s(%s) failed",
op->GetName().c_str(), op->GetType().c_str());
GELOGE(FAILED, "op [%s] type[%s] add output(%zu) tensor failed.", op_name.c_str(), op->GetType().c_str(), i);
return FAILED;
}
@@ -410,6 +414,8 @@ Status TensorFlowModelParser::ParseOpParams(const domi::tensorflow::NodeDef *nod
ge::Operator op_src(node_def->name(), node_def->op());
status = domi::AutoMappingFn(node_def, op_src);
if (status != SUCCESS) {
REPORT_CALL_ERROR("E19999", "Auto mapping node_def:%s(%s) to operator failed",
node_def->name().c_str(), node_def->op().c_str());
GELOGE(status, "Node[%s] auto mapping failed.", node_name.c_str());
return status;
}
@@ -499,6 +505,7 @@ Status TensorFlowModelParser::AddNode(const domi::tensorflow::NodeDef *node_def,
// Find all children of the fusion operator
auto iter = fusion_op_nodedef_map_.find(node_def->name());
if (iter == fusion_op_nodedef_map_.end()) {
REPORT_INNER_ERROR("E19999", "FusionOp node %s has no children node, check invalid", node_name.c_str());
GELOGE(FAILED, "FusionOp node %s has no children node!", node_name.c_str());
return INTERNAL_ERROR;
}
@@ -786,7 +793,11 @@ Status TensorFlowModelParser::CheckOpShapeDim(const domi::tensorflow::NodeDef *n
bool is_attr_exist =
ge::TensorFlowUtil::FindAttrValue(node_def, ge::parser::ATTR_NAME_INPUT_TENSOR_DESC, input_attr_value);
GE_IF_BOOL_EXEC(!is_attr_exist, return SUCCESS);
GE_CHK_BOOL_EXEC(input_attr_value.has_list(), return PARAM_INVALID, "output attr value vector is empty");
GE_CHK_BOOL_EXEC(input_attr_value.has_list(),
REPORT_INNER_ERROR("E19999", "Attr:%s of node_def:%s(%s) is empty, check invalid",
ge::parser::ATTR_NAME_INPUT_TENSOR_DESC.c_str(),
node_def->name().c_str(), node_def->op().c_str());
return PARAM_INVALID, "output attr value vector is empty");

// list contain many TensorDescriptors
domi::tensorflow::AttrValue_ListValue a_list = input_attr_value.list();
@@ -850,7 +861,9 @@ ge::DataType TensorFlowModelParser::ConvertToGeDataType(const uint32_t type) {

Status TensorFlowModelParser::ParseNodeDef(TensorFlowModelParser *parser, ge::ComputeGraphPtr &graph,
std::mutex *graphMutex, shared_ptr<ge::ScopeGraph> &scope_graph,
const domi::tensorflow::NodeDef *node_def) {
const domi::tensorflow::NodeDef *node_def,
error_message::Context error_context) {
ErrorManager::GetInstance().SetErrorContext(error_context);
// The caller guarantees that the pointer is not null
string node_name = node_def->name();
string node_op = node_def->op();
@@ -861,7 +874,10 @@ Status TensorFlowModelParser::ParseNodeDef(TensorFlowModelParser *parser, ge::Co
}

auto iterator = parser->adaptedOpTypeMap_.find(node_name);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(iterator == parser->adaptedOpTypeMap_.end(), return FAILED,
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(iterator == parser->adaptedOpTypeMap_.end(),
REPORT_INNER_ERROR("E19999", "get adapted op type failed, node name = %s",
node_name.c_str());
return FAILED,
"get adapted op type failed, node name = %s", node_name.c_str());

string op_type = iterator->second;
@@ -901,6 +917,8 @@ Status TensorFlowModelParser::ParseNodeDef(TensorFlowModelParser *parser, ge::Co
}
return SUCCESS;
} else {
REPORT_INPUT_ERROR("E12011", std::vector<std::string>({"opname", "optype"}),
std::vector<std::string>({node_name, op_type}));
GELOGE(INTERNAL_ERROR, "op[%s] type[%s] have no ir factory.]", node_name.c_str(), op_type.c_str());
return FAILED;
}
@@ -953,7 +971,11 @@ Status TensorFlowModelParser::ParseNodeDef(TensorFlowModelParser *parser, ge::Co
node = graph->AddNode(op);
}

GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((node == nullptr), return INTERNAL_ERROR, "add node failed.");
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((node == nullptr),
REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
op->GetName().c_str(), op->GetType().c_str(),
graph->GetName().c_str());
return INTERNAL_ERROR, "add node failed.");

if (needFusion) {
shared_ptr<OpParser> fusion_op_parser = factory->CreateFusionOpParser(op_type);
@@ -1018,7 +1040,8 @@ Status TensorFlowModelParser::AddFmkNode(ge::ComputeGraphPtr &graph, shared_ptr<
const domi::tensorflow::NodeDef *node_def = nodedef_map_[op_node_name];
GE_CHECK_NOTNULL(node_def);
std::future<Status> f =
executor.commit(TensorFlowModelParser::ParseNodeDef, this, graph_tmp, &graphMutex, scope_graph, node_def);
executor.commit(TensorFlowModelParser::ParseNodeDef, this, graph_tmp, &graphMutex, scope_graph, node_def,
ErrorManager::GetInstance().GetErrorManagerContext());
if (!f.valid()) {
GELOGE(FAILED, "Future is invalid");
return FAILED;
@@ -1048,7 +1071,10 @@ Status TensorFlowModelParser::AddNodeToGraphAndMarkFormat(ge::ComputeGraphPtr &g
for (size_t j = 0; j < op_node_list_size; j++) {
const string op_node_name = op_node_name_list[j];
auto iterator = node_map_.find(op_node_name);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((iterator == node_map_.end()), return INTERNAL_ERROR, "add node failed.");
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((iterator == node_map_.end()),
REPORT_INNER_ERROR("E19999", "node:%s can't find in node_map_, check invalid",
op_node_name.c_str());
return INTERNAL_ERROR, "add node failed.");
GE_CHECK_NOTNULL(iterator->second);
GE_CHK_STATUS_RET(iterator->second->SetOwnerComputeGraph(graph), "set owner compute graph failed");
graph->AddNode(iterator->second);
@@ -1074,6 +1100,7 @@ Status TensorFlowModelParser::ExcuteScopeFusionPasses(domi::tensorflow::GraphDef
ge::StringUtils::Split(ge::GetParserContext().enable_scope_fusion_passes, ',');
auto &impl = ge::ScopeFusionPassRegistry::GetInstance().impl_;
if (impl == nullptr) {
REPORT_INNER_ERROR("E19999", "ScopeFusionPassRegistry is not properly initialized.");
GELOGE(ge::MEMALLOC_FAILED, "ScopeFusionPassRegistry is not properly initialized.");
return ge::MEMALLOC_FAILED;
}
@@ -1098,7 +1125,7 @@ Status TensorFlowModelParser::ExcuteScopeFusionPasses(domi::tensorflow::GraphDef
}

Status TensorFlowModelParser::ParseFromMemory(const char *data, uint32_t size, ge::ComputeGraphPtr &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(data);
GE_CHECK_NOTNULL(graph);

@@ -1106,7 +1133,9 @@ Status TensorFlowModelParser::ParseFromMemory(const char *data, uint32_t size, g
domi::tensorflow::GraphDef OriDef;

bool read = ge::parser::ReadProtoFromArray(data, static_cast<int>(size), &OriDef);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(!read, return INTERNAL_ERROR, "read_proto_from_binary failed.");
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(!read,
REPORT_INNER_ERROR("E19999", "read graph proto from binary failed");
return INTERNAL_ERROR, "read_proto_from_binary failed.");

domi::tensorflow::GraphDef graph_def;
if (ge::GetParserContext().input_dims.empty() && ge::GetParserContext().out_nodes_map.empty()) {
@@ -1190,12 +1219,14 @@ Status TensorFlowModelParser::ParseFromMemory(const char *data, uint32_t size, g
const string op_node_name = op_node_name_list[i];
const domi::tensorflow::NodeDef *node_def = nodedef_map_[op_node_name_list[i]];
if (node_def == nullptr) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in nodedef_map_, check invalid", op_node_name.c_str());
GELOGE(INTERNAL_ERROR, "Node def is nullptr, name:%s.", op_node_name.c_str());
DeleteFuisonNodeDef();
return INTERNAL_ERROR;
}
const string &node_op = node_def->op();
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((tensorflow_op_map.find(node_op) == tensorflow_op_map.end()), DeleteFuisonNodeDef();
REPORT_INNER_ERROR("E19999", "Op type %s unsupport", node_op.c_str());
return INTERNAL_ERROR, "Unsupport op type %s", node_op.c_str());

ret = AddNode(node_def, graph, scope_graph);
@@ -1241,7 +1272,7 @@ Status TensorFlowModelParser::GetFunctionProto(const string &file,
}

Status TensorFlowModelParser::Parse(const char *model_path, ge::Graph &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(model_path);
ge::ComputeGraphPtr root_graph = ge::GraphUtils::GetComputeGraph(graph);
GE_CHECK_NOTNULL(root_graph);
@@ -1335,7 +1366,7 @@ Status TensorFlowModelParser::Parse(const char *model_path, ge::ComputeGraphPtr
}

Status TensorFlowModelParser::ParseAllGraph(const google::protobuf::Message *proto, ge::ComputeGraphPtr &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
GE_CHECK_NOTNULL(proto);
GE_CHECK_NOTNULL(graph);

@@ -1432,6 +1463,7 @@ Status TensorFlowModelParser::ParseAllGraph(const google::protobuf::Message *pro
const string op_node_name = op_node_name_list[i];
const domi::tensorflow::NodeDef *node_def = nodedef_map_[op_node_name_list[i]];
if (node_def == nullptr) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in nodedef_map_, check invalid", op_node_name.c_str());
GELOGE(INTERNAL_ERROR, "Cannot find [%s] in nodedef map.", op_node_name_list[i].c_str());
DeleteFuisonNodeDef();
return INTERNAL_ERROR;
@@ -1635,6 +1667,7 @@ Status TensorFlowModelParser::RunScopeFusionPass(const vector<string> &scope_pas
GE_CHECK_NOTNULL(scope_graph);
auto &impl = ge::ScopeFusionPassRegistry::GetInstance().impl_;
if (impl == nullptr) {
REPORT_INNER_ERROR("E19999", "ScopeFusionPassRegistry is not properly initialized.");
GELOGE(ge::MEMALLOC_FAILED, "ScopeFusionPassRegistry is not properly initialized.");
return ge::MEMALLOC_FAILED;
}
@@ -1699,7 +1732,9 @@ bool TensorFlowModelParser::MaybeFusionOp(shared_ptr<ge::ScopeGraph> &scope_grap
}

bool TensorFlowModelParser::IsFusionOpChild(const string &node_name, ge::ScopeFusionOpInfo *info) {
GE_CHK_BOOL_EXEC(info != nullptr, return false, "fusion info is null.");
GE_CHK_BOOL_EXEC(info != nullptr,
REPORT_CALL_ERROR("E19999", "Param info is nullptr, check invalid");
return false, "fusion info is null.");
// 1.View in full match fusion strategy first
// 2.View in scope fusion policy then
auto iter = fusion_op_children_.find(node_name);
@@ -2168,7 +2203,7 @@ Status TensorFlowWeightsParser::ParseFromMemory(const char *data, uint32_t size,
Status TensorFlowWeightsParser::Parse(const char *file, ge::Graph &graph) { return SUCCESS; }

Status TensorFlowModelParser::ParseProto(const google::protobuf::Message *proto, ge::ComputeGraphPtr &graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
ErrorManager::GetInstance().GenWorkStreamIdDefault();
PARSER_TIMESTAMP_START(ParseProto);
GE_CHECK_NOTNULL(proto);
@@ -2296,7 +2331,7 @@ Status TensorFlowModelParser::ParseProto(const google::protobuf::Message *proto,

Status TensorFlowModelParser::ParseProtoWithSubgraph(const google::protobuf::Message *root_proto,
domi::GetGraphCallback callback, ge::ComputeGraphPtr &root_graph) {
ErrorManager::GetInstance().SetStage(ErrorMessage::kModelCompile, ErrorMessage::kParser);
ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
ErrorManager::GetInstance().GenWorkStreamIdDefault();
GE_CHECK_NOTNULL(root_proto);
GE_CHECK_NOTNULL(callback);
@@ -2314,6 +2349,7 @@ Status TensorFlowModelParser::ParseProtoWithSubgraph(const google::protobuf::Mes
if (arg.proto == nullptr) {
auto proto = callback(root_proto, arg.function_name);
if (proto == nullptr) {
REPORT_CALL_ERROR("E19999", "callback execute failed, func_name:%s", arg.function_name.c_str());
GELOGE(FAILED, "Failed to get function by name %s", arg.function_name.c_str());
return FAILED;
}
@@ -2350,12 +2386,20 @@ Status TensorFlowModelParser::ParseProtoWithSubgraph(const google::protobuf::Mes
Status TensorFlowModelParser::OptimizeIdentityByOutput(map<string, NodeDef *> &nodedef_map,
const string &curr_node_name, bool &clear_input_flag) {
auto context_iter = op_node_context_map_.find(curr_node_name);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((context_iter == op_node_context_map_.end()), return INTERNAL_ERROR,
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((context_iter == op_node_context_map_.end()),
REPORT_INNER_ERROR("E19999",
"Node:%s can't find in op_node_context_map_, check invalid",
curr_node_name.c_str());
return INTERNAL_ERROR,
"Can't find op node context.");
OpNodeContext op_node_context = context_iter->second;

auto node_def_iter = nodedef_map.find(curr_node_name);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((node_def_iter == nodedef_map.end()), return INTERNAL_ERROR, "Can't find nodedef");
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((node_def_iter == nodedef_map.end()),
REPORT_INNER_ERROR("E19999",
"Node:%s can't find in nodedef_map, check invalid",
curr_node_name.c_str());
return INTERNAL_ERROR, "Can't find nodedef");
domi::tensorflow::NodeDef *curr_node_def = node_def_iter->second;
GE_CHECK_NOTNULL(curr_node_def);
bool has_out_retval = false;
@@ -2420,12 +2464,17 @@ Status TensorFlowModelParser::OptimizeSnapShot(domi::tensorflow::NodeDef *curr_m
const std::vector<string> &control_list) {
GE_CHECK_NOTNULL(curr_mode_def);
if (curr_mode_def == nullptr) {
REPORT_INNER_ERROR("E19999", "Param curr_mode_def is nullptr, check invalid");
GELOGE(FAILED, "input param is nullptr.");
return PARAM_INVALID;
}
string curr_node_name = curr_mode_def->name();
auto context_iter = op_node_context_map_.find(curr_node_name);
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((context_iter == op_node_context_map_.end()), return INTERNAL_ERROR,
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((context_iter == op_node_context_map_.end()),
REPORT_INNER_ERROR("E19999",
"Node:%s can't find in op_node_context_map_, check invalid",
curr_node_name.c_str());
return INTERNAL_ERROR,
"Can't find op node context.");
OpNodeContext op_node_context = context_iter->second;

@@ -2514,6 +2563,8 @@ Status TensorFlowModelParser::GraphDefOptimizeSnapShot(domi::tensorflow::GraphDe
}
}
if (data_input_cnt != 1) {
REPORT_INNER_ERROR("E19999", "Node:%s's input data size:%u not equal to 1, check invalid",
curr_node_def->name().c_str(), data_input_cnt);
GELOGE(FAILED, "%s op data input size %u invalid", curr_node_def->name().c_str(), data_input_cnt);
return FAILED;
}
@@ -2575,6 +2626,7 @@ void TensorFlowModelParser::OptimizeDestroyTemporaryVariable(domi::tensorflow::G
Status TensorFlowModelParser::GraphDefOptimizeDestroyTemporaryVariable(domi::tensorflow::GraphDef *graph_def,
domi::tensorflow::NodeDef *nodeCurrent) {
if (graph_def == nullptr || nodeCurrent == nullptr) {
REPORT_INNER_ERROR("E19999", "Param graph_def or nodeCurrent is nullptr, check invalid");
GELOGE(FAILED, "input param is nullptr.");
return FAILED;
}
@@ -2608,6 +2660,8 @@ Status TensorFlowModelParser::GraphDefOptimizeDestroyTemporaryVariable(domi::ten
}
}
if (!clearInputFlag) {
REPORT_INNER_ERROR("E19999", "Optimize DestroyTemporaryVariable failed, node name is :%s.",
nodeCurrent->name().c_str());
GELOGE(INTERNAL_ERROR, "Optimize DestroyTemporaryVariable failed, node name is :%s.", nodeCurrent->name().c_str());
return FAILED;
}
@@ -3106,12 +3160,17 @@ Status TensorFlowModelParser::FusionNodeParseParams(shared_ptr<OpParser> &op_par
shared_ptr<TensorFlowFusionOpParser> tensorflow_fusion_op_parser =
std::dynamic_pointer_cast<TensorFlowFusionOpParser>(op_parser);
GE_IF_BOOL_EXEC(tensorflow_fusion_op_parser == nullptr,
REPORT_INNER_ERROR("E19999", "Param op_parser is not TensorFlowFusionOpParser Type, check invalid");
GELOGE(FAILED, "node :%s can not get fusion parser, please check!", node_def->name().c_str());
return INTERNAL_ERROR);

// Find all children of the fusion operator
auto iter = fusion_op_nodedef_map_.find(node_def->name());
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(iter == fusion_op_nodedef_map_.end(), return INTERNAL_ERROR,
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(iter == fusion_op_nodedef_map_.end(),
REPORT_INNER_ERROR("E19999",
"Node:%s can't find in fusion_op_nodedef_map_, check invalid",
node_def->name().c_str());
return INTERNAL_ERROR,
"FusionOp node %s has no children node!", node_def->name().c_str());

(void)ge::AttrUtils::SetStr(node->GetOpDesc(), ge::ATTR_NAME_FUSIONOP_ORIGINAL_TYPE, node_def->op());
@@ -3128,6 +3187,8 @@ Status TensorFlowModelParser::FusionNodeParseParams(shared_ptr<OpParser> &op_par
ge::Operator op_src(node_def_src->name(), node_def_src->op());
status = domi::AutoMappingFn(node_def_src, op_src);
if (status != SUCCESS) {
REPORT_CALL_ERROR("E19999", "Auto mapping node_def:%s(%s) to operator failed",
node_def_src->name().c_str(), node_def_src->op().c_str());
GELOGE(status, "Node[%s] auto mapping failed", node_def_src->name().c_str());
return status;
}
@@ -3137,6 +3198,8 @@ Status TensorFlowModelParser::FusionNodeParseParams(shared_ptr<OpParser> &op_par
ge::GeTensorDesc tensor_desc;
tensor_desc.SetName(node_def_src->input(i));
if (op_desc->AddInputDesc(tensor_desc) != GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "Add input desc to op:%s(%s) failed",
op_desc->GetName().c_str(), op_desc->GetType().c_str());
GELOGE(FAILED, "Op [%s] type[%s] add input(%d) tensor failed.", op_desc->GetName().c_str(),
op_desc->GetType().c_str(), i);
return FAILED;
@@ -3230,6 +3293,9 @@ Status TensorFlowModelParser::OptimizeConstNodes4CustomOp(domi::tensorflow::Grap
} else if (it.moveType == domi::OMG_INPUT_REORDER) {
auto inputs = current_node->input();
if (static_cast<size_t>(inputs.size()) != it.input_order.size()) {
REPORT_INNER_ERROR("E19999", "Input size of node:%s(%s) is mismatched, new order size:%zu, input size:%d",
current_node->name().c_str(), current_node->op().c_str(),
it.input_order.size(), inputs.size());
GELOGE(INTERNAL_ERROR, "Size of input is mismatched, new order size is %zu, input size is %d.",
it.input_order.size(), inputs.size());
return INTERNAL_ERROR;
@@ -3237,6 +3303,8 @@ Status TensorFlowModelParser::OptimizeConstNodes4CustomOp(domi::tensorflow::Grap
for (size_t i = 0; i < it.input_order.size(); ++i) {
int new_index = it.input_order[i];
if (new_index < 0 || new_index >= inputs.size()) {
REPORT_INNER_ERROR("E19999", "New order of %s has invalid index %d, out of range(0, %d)",
it_node_map.first.c_str(), new_index, inputs.size());
GELOGE(INTERNAL_ERROR, "New order of %s has invalid index %d.", it_node_map.first.c_str(), new_index);
return INTERNAL_ERROR;
}
@@ -3268,6 +3336,7 @@ Status TensorFlowModelParser::AddControlEdgeAfterRemoveInputs(domi::tensorflow::
string input_node_name = NodeNameFromInput(remove_input);
auto it = all_node_map.find(input_node_name);
if (it == all_node_map.end()) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in all_node_map, check invalid", input_node_name.c_str());
GELOGE(FAILED, "Can not find node name:%s in all node map.", input_node_name.c_str());
return FAILED;
}
@@ -3413,6 +3482,7 @@ Status TensorFlowModelParser::RemoveIsolateNode(domi::tensorflow::GraphDef *grap
domi::tensorflow::NodeDef *node = graph_def->mutable_node(i);
const string &node_name = node->name();
if (node_inputs_outputs_map_.find(node_name) == node_inputs_outputs_map_.end()) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in node_inputs_outputs_map_, check invalid", node_name.c_str());
GELOGE(FAILED, "Can not find input output context, node:%s.", node_name.c_str());
return FAILED;
}
@@ -3467,6 +3537,9 @@ Status TensorFlowModelParser::RecordFusionResult(std::shared_ptr<ge::ScopeGraph>
}

if (fusion_output.second[i] >= static_cast<int32_t>(op_desc->GetOutputsSize())) {
REPORT_INNER_ERROR("E19999", "fusion output index:%d of node:%s(%s) must less than outputs desc size %zu.",
fusion_output.second[i], op_desc->GetName().c_str(), op_desc->GetType().c_str(),
op_desc->GetOutputsSize());
GELOGE(PARAM_INVALID, "fusion output index %d must less than outputs desc size %zu.", fusion_output.second[i],
op_desc->GetOutputsSize());
return PARAM_INVALID;
@@ -3648,6 +3721,7 @@ Status TensorFlowModelParser::UpdateInnerNodeContext(const string &fusion_op_nam
const std::vector<std::string> &inner_nodes_name) {
auto fusion_iter = op_node_context_map_.find(fusion_op_name);
if (fusion_iter == op_node_context_map_.end()) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in op_node_context_map_, check invalid", fusion_op_name.c_str());
GELOGE(INTERNAL_ERROR, "Can't find context for fusion node %s.", fusion_op_name.c_str());
return INTERNAL_ERROR;
}
@@ -3774,6 +3848,7 @@ Status TensorFlowModelParser::AddScopeInnerNode(TensorFlowModelParser *parser, g
string node_op = node_def->op();
auto iter = parser->scope_inner_node_map_.find(node_name);
if (iter == parser->scope_inner_node_map_.end()) {
REPORT_INNER_ERROR("E19999", "Node:%s can't find in scope_inner_node_map_, check invalid", node_name.c_str());
GELOGE(PARAM_INVALID, "Failed to find scope inner node:%s, type:%s.", node_name.c_str(), node_op.c_str());
return PARAM_INVALID;
}
@@ -3786,6 +3861,8 @@ Status TensorFlowModelParser::AddScopeInnerNode(TensorFlowModelParser *parser, g
node = graph->AddNode(op_desc);
}
if (node == nullptr) {
REPORT_CALL_ERROR("E19999", "Add node:%s(%s) to graph:%s failed",
op_desc->GetName().c_str(), op_desc->GetType().c_str(), graph->GetName().c_str());
GELOGE(INTERNAL_ERROR, "Failed to Add scope inner node:%s, type:%s.", op_desc->GetName().c_str(),
op_desc->GetType().c_str());
return INTERNAL_ERROR;
@@ -3839,6 +3916,8 @@ Status TensorFlowModelParser::CheckAndUpdateInputDesc(ge::ComputeGraphPtr &compu
ge::GeTensorDesc tensor_desc;
auto ret = op_desc->UpdateInputDesc(static_cast<uint32_t>(in_anchor->GetIdx()), tensor_desc);
if (ret != ge::GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "Update index:%d of input desc in op:%s(%s) failed", in_anchor->GetIdx(),
op_desc->GetName().c_str(), op_desc->GetType().c_str());
GELOGE(ret, "Failed to update input desc, node:%s, index:%d.", node->GetName().c_str(), in_anchor->GetIdx());
return ret;
}
@@ -3848,6 +3927,8 @@ Status TensorFlowModelParser::CheckAndUpdateInputDesc(ge::ComputeGraphPtr &compu
ge::GeTensorDesc tensor_desc(ge::GeShape(), FORMAT_RESERVED, DT_UNDEFINED);
auto ret = op_desc->UpdateInputDesc(static_cast<uint32_t>(in_anchor->GetIdx()), tensor_desc);
if (ret != ge::GRAPH_SUCCESS) {
REPORT_CALL_ERROR("E19999", "Update index:%d of input desc in op:%s(%s) failed", in_anchor->GetIdx(),
op_desc->GetName().c_str(), op_desc->GetType().c_str());
GELOGE(ret, "Failed to update input desc, node:%s, index:%d.", node->GetName().c_str(), in_anchor->GetIdx());
return ret;
}


+ 2
- 1
parser/tensorflow/tensorflow_parser.h View File

@@ -565,7 +565,8 @@ class PARSER_FUNC_VISIBILITY TensorFlowModelParser : public domi::ModelParser {
*
*/
static Status ParseNodeDef(TensorFlowModelParser *parser, ge::ComputeGraphPtr &graph, std::mutex *graphMutex,
shared_ptr<ge::ScopeGraph> &scope_graph, const domi::tensorflow::NodeDef *node_def);
shared_ptr<ge::ScopeGraph> &scope_graph, const domi::tensorflow::NodeDef *node_def,
error_message::Context error_context);

/**
* @ingroup domi_omg


Loading…
Cancel
Save