| Author | SHA1 | Message | Date |
|---|---|---|---|
| | 73b3f3a063 | !452 code check clean; Merge pull request !452 from 徐睿/r1.7.0 | 4 years ago |
| | 5980eeaec2 | !449 misra; * opensource | 4 years ago |
| | 58f4962efd | opensource clean | 4 years ago |
| | 7615edda06 | !448 code check clean; Merge pull request !448 from 徐睿/r1.7.0 | 4 years ago |
| | d58d581e44 | codecheck clean | 4 years ago |
| | 8caca8f7b6 | !446 static check clean; Merge pull request !446 from 徐睿/r1.7.0 | 4 years ago |
| | bab166faf6 | !447 Unique "using Status = uint32_t"; Merge pull request !447 from 张晓昆/c80comm | 4 years ago |
| | 011ba36582 | Unique "using Status = uint32_t" | 4 years ago |
| | 8934eb4b41 | opensource clean | 4 years ago |
| | 069dac4ea8 | codecheck clean | 4 years ago |
| | 620d774ce8 | !443 Security issue modification; Merge pull request !443 from wangzhengjun/sec_fix_r170 | 4 years ago |
| | f66771df04 | code review | 4 years ago |
| | e76c8a1e40 | !435 update owners; Merge pull request !435 from 王涛/r1.7.0 | 4 years ago |
| | d215e9fa37 | update OWNERS. | 4 years ago |
| | d0a41996fc | !425 ShuffleNetV1 network parser bugfix; Merge pull request !425 from huanruizhi/r1.7.0 | 4 years ago |
| | 781fae2808 | parser bugfix | 4 years ago |
| | f4e55296a4 | update .gitmodules. | 4 years ago |
```diff
@@ -1,4 +1,4 @@
 [submodule "metadef"]
   path = metadef
   url = https://gitee.com/ascend/metadef.git
-  branch = master
+  branch = r1.7.0
```
```diff
@@ -1,8 +1,11 @@
 approvers:
 - ji_chen
-- wqtshg
-- ljl0711
-- liu-jisheng
+- liyihan123
+- startzgf168
+- qkunz
 reviewers:
 - xchu42
 - sheng-nan
+- wqtshg
+- ljl0711
+- liu-jisheng
```
```diff
@@ -1 +1 @@
-Subproject commit 326ecbb2b4837699aa674cc30e9b9956e4fd364d
+Subproject commit 0b6395643fd358080d8d2a80868d09154d47b7e9
```
```diff
@@ -83,7 +83,7 @@ Status CaffeCustomParserAdapter::ParseWeights(const Message *op_src, ge::NodePtr
   GE_CHECK_NOTNULL(op);
   const LayerParameter *layer = reinterpret_cast<const LayerParameter *>(op_src);
-  GE_CHK_BOOL_RET_STATUS(nullptr != layer, FAILED, "[Convert][Type]Dynamic cast op_src to LayerParameter failed");
+  GE_CHK_BOOL_RET_STATUS(layer != nullptr, FAILED, "[Convert][Type]Dynamic cast op_src to LayerParameter failed");
   GELOGI("layer: %s blobs_size: %d bottom_size: %d", layer->name().c_str(), layer->blobs_size(), layer->bottom_size());
   if (layer->blobs_size() == 0) {
     return SUCCESS;
```
```diff
@@ -54,16 +54,16 @@ Status CaffeOpParser::ConvertWeight(const BlobProto &proto, const string &lay_na
   ConvertShape(proto, shape_vec);
   ge::GeShape shape(shape_vec);
   // Calculate the number of data in weight
-  int count = 1;
+  int32_t count = 1;
   for (size_t i = 0; i < shape.GetDimNum(); ++i) {
-    int dim = shape.GetDim(i);
+    int32_t dim = static_cast<int32_t>(shape.GetDim(i));
     if (dim <= 0) {
       REPORT_INNER_ERROR("E19999", "Convert weight fail, dim:%d of layer:%s <=0, check invalid", dim, lay_name.c_str());
       GELOGE(FAILED, "[Check][Size]Convert weight fail, dim:%d of layer:%s <=0, check invalid", dim, lay_name.c_str());
       return FAILED;
     }
-    if (dim >= INT64_MAX / count) {
+    if (dim >= INT32_MAX / count) {
       REPORT_INNER_ERROR("E19999", "Convert weight fail, shape:%s of layer:%s will overflow after multi",
                          shape.ToString().c_str(), lay_name.c_str());
       GELOGE(FAILED, "[Check][Size]Convert weight fail, Blob size exceeds INT64_MAX, dim:%d, count:%d, layer name:%s",
```
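The hunk above narrows `count` to `int32_t` and changes the overflow guard from `INT64_MAX` to `INT32_MAX`, so the pre-multiplication check now matches the type it protects (the log text still mentions INT64_MAX). A minimal standalone sketch of that guard pattern, with illustrative names rather than the parser's:

```cpp
#include <cstdint>
#include <vector>

// Sketch of the same guard: refuse to multiply when dim * count could no
// longer fit in int32_t, instead of overflowing and checking afterwards.
bool CountWeightElements(const std::vector<int32_t> &dims, int32_t &count) {
  count = 1;
  for (const int32_t dim : dims) {
    if (dim <= 0) {
      return false;  // reject non-positive dimensions, as the hunk does
    }
    if (dim >= INT32_MAX / count) {
      return false;  // dim * count would exceed INT32_MAX
    }
    count *= dim;
  }
  return true;
}
```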
```diff
@@ -78,6 +78,10 @@ using std::ifstream;
   } \
   } while (0)

+namespace {
+const size_t kMaxErrStrLen = 128U;
+}  // namespace
+
 namespace ge {
 graphStatus aclgrphParseCaffe(const char *model_file, const char *weights_file, ge::Graph &graph) {
   ErrorManager::GetInstance().SetStage(error_message::kModelCompile, error_message::kParser);
```
```diff
@@ -247,7 +251,9 @@ Status CheckPathValid(const char *model_path, const string &custom_proto, string
                       string &custom_proto_name) {
   string path_model = ge::parser::RealPath(model_path);
   if (path_model.empty()) {
-    ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"}, {model_path, strerror(errno)});
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"}, {model_path, err_msg});
    GELOGE(FAILED, "[Check][Param]ModelPath %s is Invalid path of model", model_path);
    return FAILED;
  }
```
```diff
@@ -447,24 +453,30 @@ Status CaffeModelParser::CustomProtoParse(const char *model_path, const string &
 Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google::protobuf::Message *message) {
   int32_t copy_fd = mmDup(STDERR_FILENO);
   if (copy_fd < 0) {
-    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
-    GELOGE(FAILED, "[Invoke][Dup] failed:%d, reason:%s", copy_fd, strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
+    GELOGE(FAILED, "[Invoke][Dup] failed:%d, reason:%s", copy_fd, err_msg);
     return FAILED;
   }
   int32_t fd = mmOpen(kDevNull, M_RDWR);
   if (fd < 0) {
     (void)mmClose(copy_fd);
-    ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {kDevNull, strerror(errno)});
-    GELOGE(FAILED, "[Open][File] %s failed. reason:%s", kDevNull, strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {kDevNull, err_msg});
+    GELOGE(FAILED, "[Open][File] %s failed. reason:%s", kDevNull, err_msg);
     return FAILED;
   }
   if (mmDup2(fd, STDERR_FILENO) < 0) {
     (void)mmClose(fd);
     (void)mmClose(copy_fd);
-    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
-    GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
+    GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", err_msg);
     return FAILED;
   }
@@ -478,8 +490,10 @@ Status CaffeModelParser::ReadModelWithoutWarning(const char *model_path, google:
   if (mmDup2(copy_fd, STDERR_FILENO) < 0) {
     (void)mmClose(fd);
     (void)mmClose(copy_fd);
-    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", strerror(errno));
-    GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    REPORT_CALL_ERROR("E19999", "Duplicate to file STDERR_FILENO failed, errmsg:%s", err_msg);
+    GELOGE(FAILED, "[Invoke][Dup2] Re-orient failed. reason:%s", err_msg);
     return FAILED;
   }
   (void)mmClose(fd);
```
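For context, `ReadModelWithoutWarning` silences protobuf's console output while parsing: it saves stderr with `mmDup`, points stderr at the null device with `mmOpen`/`mmDup2`, and restores the saved descriptor afterwards; the two hunks above only change how the failure reason is formatted. A plain-POSIX sketch of that save/redirect/restore idiom (not the parser's code, just the flow under the mm wrappers):

```cpp
#include <fcntl.h>
#include <unistd.h>

// Runs fn() with stderr redirected to /dev/null, then restores stderr.
// Returns false if any descriptor operation fails.
template <typename Fn>
bool WithSilencedStderr(Fn fn) {
  const int saved = dup(STDERR_FILENO);           // keep a copy of the real stderr
  if (saved < 0) { return false; }
  const int null_fd = open("/dev/null", O_RDWR);  // sink for the noisy output
  if (null_fd < 0) { close(saved); return false; }
  if (dup2(null_fd, STDERR_FILENO) < 0) { close(null_fd); close(saved); return false; }
  fn();                                           // work that would otherwise spam stderr
  const bool restored = (dup2(saved, STDERR_FILENO) >= 0);  // put stderr back
  close(null_fd);
  close(saved);
  return restored;
}
```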
```diff
@@ -861,8 +875,7 @@ Status CaffeModelParser::AddNode(const domi::caffe::LayerParameter &layer, ge::C
   // AddConstInput is a function defined in caffe_op_parser, override in caffe_reshape_parser.
   std::shared_ptr<CaffeOpParser> caffe_op_parser = std::static_pointer_cast<CaffeOpParser>(op_parser);
   GE_CHECK_NOTNULL(caffe_op_parser);
-  Status status;
-  status = caffe_op_parser->AddConstInput(node);
+  Status status = caffe_op_parser->AddConstInput(node);
   if (status != SUCCESS) {
     REPORT_CALL_ERROR("E19999", "AddConstInput failed for node:%s", node->GetOpDesc()->GetName().c_str());
     GELOGE(FAILED, "[Add][ConstInput] to node %s fail.", node->GetOpDesc()->GetName().c_str());
```
```diff
@@ -1101,14 +1114,14 @@ Status CaffeModelParser::AddUserOutNodesTop() {
 }

 Status CaffeModelParser::AddOutputTop(const domi::caffe::NetParameter &proto_message) {
-  for (int32_t i = 0; i < proto_message.layer_size(); i++) {
-    const domi::caffe::LayerParameter &layer = proto_message.layer(i);
+  for (int32_t layer_index = 0; layer_index < proto_message.layer_size(); ++layer_index) {
+    const domi::caffe::LayerParameter &layer = proto_message.layer(layer_index);
     if (!CheckValidLayer(layer)) {
       continue;
     }
-    for (int i = 0; i < layer.top_size(); i++) {
+    for (int32_t i = 0; i < layer.top_size(); i++) {
       string top = layer.top(i);
       string top_origin = top;
       // Handling 'inplace' scenarios
@@ -1134,7 +1147,7 @@ Status CaffeModelParser::AddOutputTop(const domi::caffe::NetParameter &proto_mes
       GELOGI("output in top_blob: %s", layer.name().c_str());
       if (top_node_iter != node_map.end()) {
         ge::GetParserContext().out_tensor_names.push_back(top_origin);
-        ge::GetParserContext().default_out_nodes.push_back(std::make_pair(layer.name(), (int32_t)i));
+        ge::GetParserContext().default_out_nodes.push_back(std::make_pair(layer.name(), i));
         GELOGI("The top of out node [%s] is [%s]", layer.name().c_str(), top_origin.c_str());
       }
     }
```
```diff
@@ -1261,8 +1274,8 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
   std::map<std::string, std::vector<std::string>> layer_params_map;
   // same param name set <paramnames,layernames>
   // std::map<std::vector<std::string>, std::vector<std::string>> params_share_map;
-  for (int32_t i = 0; i < layer_count; i++) {
-    domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(i));
+  for (int32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
+    domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(layer_index));
     GE_CHK_BOOL_EXEC_INFO(CheckValidLayer(layer), continue,
                           "[Check][Layer]layer phase is train, skip this layer, name:%s, type:%s.",
@@ -1284,7 +1297,7 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
                     // Times accumulation of duplicate operators
                     layer_name_map[layer.name()]++;
                     // Set the name in proto and layer
-                    domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(i);
+                    domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(layer_index);
                     duplicate_name_layer->set_name(new_name); layer.set_name(new_name);)
     // Insert the new operator name, the number of times of duplicate name is recorded as 1
@@ -1300,7 +1313,7 @@ Status CaffeModelParser::ParseFromMemory(const char *data, uint32_t size, ge::Co
     // parse ParamSpec
     std::vector<string> v_param_names;
-    for (int i = 0; i < layer.param_size(); i++) {
+    for (int32_t i = 0; i < layer.param_size(); i++) {
       const domi::caffe::ParamSpec &param = layer.param(i);
       GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
     }
```
```diff
@@ -1483,8 +1496,8 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
   // <layername,paramnames>
   std::map<std::string, std::vector<std::string>> layer_params_map;
   // same param name set <paramnames,layernames>
-  for (int32_t i = 0; i < layer_count; i++) {
-    domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(i));
+  for (int32_t layer_index = 0; layer_index < layer_count; ++layer_index) {
+    domi::caffe::LayerParameter &layer = const_cast<domi::caffe::LayerParameter &>(proto_message.layer(layer_index));
     SaveOrigionLayerTops(layer);
     GE_CHK_BOOL_EXEC_INFO(CheckValidLayer(layer), continue,
                           "[Check][Layer]layer phase is train, skip this layer, name:%s, type:%s.",
@@ -1503,7 +1516,7 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
                     // Times accumulation of duplicate operators
                     layer_name_map[layer.name()]++;
                     // Set the name in proto and layer
-                    domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(i);
+                    domi::caffe::LayerParameter *duplicate_name_layer = proto_message.mutable_layer(layer_index);
                     duplicate_name_layer->set_name(new_name); layer.set_name(new_name);)
     // Insert the new operator name, the number of times of duplicate name is recorded as 1
@@ -1519,7 +1532,7 @@ Status CaffeModelParser::Parse(const char *model_path, ge::ComputeGraphPtr &grap
     // parse ParamSpec
     std::vector<string> v_param_names;
-    for (int i = 0; i < layer.param_size(); i++) {
+    for (int32_t i = 0; i < layer.param_size(); i++) {
      const domi::caffe::ParamSpec &param = layer.param(i);
      GE_IF_BOOL_EXEC((param.has_name()), v_param_names.emplace_back(param.name()));
    }
```
```diff
@@ -2099,17 +2112,17 @@ Status CaffeWeightsParser::ConvertLayerParameter(const google::protobuf::Message
                                                  ge::ComputeGraphPtr &graph) {
   vector<string> need_share_layers;
   const domi::caffe::LayerParameter *layer = reinterpret_cast<const domi::caffe::LayerParameter *>(layer_message);
-  const string &layer_name = layer->name();
+  const string &shared_layer_name = layer->name();
   const string &layer_type = layer->type();
   for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
-    if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
-      GELOGI("layer:%s need share weights !", layer_name.c_str());
+    if (find(p_iter->second.begin(), p_iter->second.end(), shared_layer_name) != p_iter->second.end()) {
+      GELOGI("layer:%s need share weights !", shared_layer_name.c_str());
       need_share_layers = p_iter->second;
     }
   }

   if (need_share_layers.size() == 0) {
-    need_share_layers.push_back(layer_name);
+    need_share_layers.push_back(shared_layer_name);
   }

   for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {
```
```diff
@@ -2216,27 +2229,27 @@ Status CaffeWeightsParser::ConvertNetParameter(const NetParameter &param, ge::Co
   for (int i = 0; i < num_layer; ++i) {
     const LayerParameter &layer = param.layer(i);
-    const string &layer_name = layer.name();
+    const string &param_layer_name = layer.name();
     // Skip some layer types
     if (skiped_layer_type_.find(layer.type()) != skiped_layer_type_.end()) {
-      GELOGI("Skip layer %s", layer_name.c_str());
+      GELOGI("Skip layer %s", param_layer_name.c_str());
       continue;
     }

-    GELOGI("Parse layer %s", layer_name.c_str());
+    GELOGI("Parse layer %s", param_layer_name.c_str());

     vector<string> need_share_layers;
     for (auto p_iter = params_share_map.begin(); p_iter != params_share_map.end(); ++p_iter) {
-      if (find(p_iter->second.begin(), p_iter->second.end(), layer_name) != p_iter->second.end()) {
-        GELOGI("Layer: %s need share weights !", layer_name.c_str());
+      if (find(p_iter->second.begin(), p_iter->second.end(), param_layer_name) != p_iter->second.end()) {
+        GELOGI("Layer: %s need share weights !", param_layer_name.c_str());
         need_share_layers = p_iter->second;
       }
     }

     if (need_share_layers.size() == 0) {
-      need_share_layers.push_back(layer_name);
+      need_share_layers.push_back(param_layer_name);
     }

     for (auto share_iter = need_share_layers.begin(); share_iter != need_share_layers.end(); ++share_iter) {
```
```diff
@@ -50,6 +50,7 @@ using std::set;
 using std::string;
 using std::unordered_map;
 using std::vector;
+using domi::Status;

 static std::map<std::vector<std::string>, std::vector<std::string>> params_share_map;

 class PARSER_FUNC_VISIBILITY CaffeModelParser : public domi::ModelParser {
```
```diff
@@ -46,6 +46,7 @@ using google::protobuf::io::ZeroCopyInputStream;
 using namespace ge::parser;

 namespace {
+const size_t kMaxErrStrLen = 128U;
 const std::string kGraphDefaultName = "domi_default";
 /// The maximum length of the file.
 /// Based on the security coding specification and the current actual (protobuf) model size, it is determined as 2G-1
```
```diff
@@ -374,7 +375,7 @@ domi::Status AclGrphParseUtil::ParseAclEnableScope(const string &enable_scope_fu
 }

 void AclGrphParseUtil::AddAttrsForInputNodes(const vector<string> &adjust_fp16_format_vec,
-                                             const string &fp16_nodes_name, uint32_t index, OpDescPtr &op_desc) {
+                                             const string &fp16_nodes_name, size_t index, OpDescPtr &op_desc) {
   if (AttrUtils::SetStr(op_desc, ATTR_ATC_USER_DEFINE_DATATYPE, TypeUtils::DataTypeToSerialString(DT_FLOAT16))) {
     if ((index < adjust_fp16_format_vec.size()) && (adjust_fp16_format_vec[index] == "true")) {
       GELOGI("This node [%s] should be set NC1HWC0", fp16_nodes_name.c_str());
@@ -405,7 +406,7 @@ domi::Status AclGrphParseUtil::ParseAclInputFp16Nodes(const ComputeGraphPtr &gra
   }
   GELOGI("The input_fp16_nodes is set %s", input_fp16_nodes.c_str());
   vector<string> input_fp16_nodes_vec = StringUtils::Split(input_fp16_nodes, ';');
-  for (uint32_t i = 0; i < input_fp16_nodes_vec.size(); ++i) {
+  for (size_t i = 0; i < input_fp16_nodes_vec.size(); ++i) {
     ge::NodePtr node = graph->FindNode(input_fp16_nodes_vec[i]);
     if (node == nullptr) {
       ErrorManager::GetInstance().ATCReportErrMessage("E10016", {"parameter", "opname"},
```
```diff
@@ -494,12 +495,12 @@ domi::Status AclGrphParseUtil::GetDefaultOutInfo(ge::ComputeGraphPtr &compute_gr
                                                  std::vector<std::pair<ge::NodePtr, int32_t>> &output_nodes_info) {
   std::vector<std::pair<std::string, int32_t>> default_out_nodes = ge::GetParserContext().default_out_nodes;
   if (!default_out_nodes.empty()) {
-    for (uint32_t i = 0; i < default_out_nodes.size(); ++i) {
+    for (size_t i = 0; i < default_out_nodes.size(); ++i) {
       ge::NodePtr out_node = compute_graph->FindNode(default_out_nodes[i].first);
       if (out_node == nullptr) {
         ErrorManager::GetInstance().ATCReportErrMessage("E10016", {"parameter", "opname"},
                                                         {"out_nodes", default_out_nodes[i].first});
-        GELOGE(domi::FAILED, "[Check][Param] Can not find out_nodes(%d) (%s) in graph.",
+        GELOGE(domi::FAILED, "[Check][Param] Can not find out_nodes(%zu) (%s) in graph.",
                i, default_out_nodes[i].first.c_str());
         return domi::FAILED;
       }
```
```diff
@@ -692,16 +693,17 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY long GetFileLength(const std::s
                                  return -1, "[Check][Param] input_file path is null.");
   std::string real_path = RealPath(input_file.c_str());
+  char_t err_buf[kMaxErrStrLen + 1U] = {};
+  const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
   GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(),
                                  REPORT_INPUT_ERROR("E19000", std::vector<std::string>({"path", "errmsg"}),
-                                                    std::vector<std::string>({real_path, strerror(errno)}));
+                                                    std::vector<std::string>({real_path, err_msg}));
                                  return -1, "[Get][Path] input_file path '%s' not valid", input_file.c_str());
   unsigned long long file_length = 0;
   GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mmGetFileSize(input_file.c_str(), &file_length) != EN_OK,
                                  ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"},
-                                                                                 {input_file, strerror(errno)});
-                                 return -1, "[Open][File] [%s] failed. %s", input_file.c_str(), strerror(errno));
+                                                                                 {input_file, err_msg});
+                                 return -1, "[Open][File] [%s] failed. %s", input_file.c_str(), err_msg);
   GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_length == 0 || file_length > kMaxFileSizeLimit),
                                  REPORT_INPUT_ERROR(
```
```diff
@@ -829,11 +831,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromText(const ch
                          "[Check][Param]incorrect parameter. nullptr == file || nullptr == message");

   std::string real_path = RealPath(file);
+  char_t err_buf[kMaxErrStrLen + 1U] = {};
+  const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
   GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(),
                                  ErrorManager::GetInstance().ATCReportErrMessage("E19000", {"path", "errmsg"},
-                                                                                 {file, strerror(errno)});
+                                                                                 {file, err_msg});
                                  return false, "[Check][Param]Path[%s]'s realpath is empty, errmsg[%s]", file,
-                                 strerror(errno));
+                                 err_msg);

   GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(real_path) == -1, return false, "[Check][Param] file size not valid.");
```
```diff
@@ -57,7 +57,7 @@ class AclGrphParseUtil {
   domi::Status ParseAclOutputFp16NodesFormat(const std::string &is_output_fp16);
   domi::Status ParseAclEnableScope(const std::string &enable_scope_fusion_passes);
   static void AddAttrsForInputNodes(const vector<string> &adjust_fp16_format_vec, const string &fp16_nodes_name,
-                                    uint32_t index, OpDescPtr &op_desc);
+                                    size_t index, OpDescPtr &op_desc);
   domi::Status ParseAclInputFp16Nodes(const ComputeGraphPtr &graph, const string &input_fp16_nodes,
                                       const string &is_input_adjust_hw_layout);
   domi::Status GetDefaultOutInfo(ge::ComputeGraphPtr &compute_graph,
```
```diff
@@ -157,7 +157,7 @@ bool ValidateStr(const std::string &filePath, const std::string &mode);
 std::string CurrentTimeInStr();

 template <typename T, typename... Args>
-static inline std::shared_ptr<T> MakeShared(Args &&... args) {
+inline std::shared_ptr<T> MakeShared(Args &&... args) {
   using T_nc = typename std::remove_const<T>::type;
   std::shared_ptr<T> ret(new (std::nothrow) T_nc(std::forward<Args>(args)...));
   return ret;
```
```diff
@@ -25,6 +25,7 @@
 #include "mmpa/mmpa_api.h"

 namespace {
+const size_t kMaxErrStrLen = 128U;
 const int kFileOpSuccess = 0;
 }  // namespace
```
```diff
@@ -65,8 +66,10 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFi
   mode_t mode = S_IRUSR | S_IWUSR;
   int32_t fd = mmOpen2(real_path, O_RDWR | O_CREAT | O_TRUNC, mode);
   if (fd == EN_ERROR || fd == EN_INVALID_PARAM) {
-    ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {file_path, strerror(errno)});
-    GELOGE(FAILED, "[Open][File] [%s] failed. %s", file_path, strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    ErrorManager::GetInstance().ATCReportErrMessage("E19001", {"file", "errmsg"}, {file_path, err_msg});
+    GELOGE(FAILED, "[Open][File] [%s] failed. %s", file_path, err_msg);
     return FAILED;
   }
   const char *model_char = model_str.c_str();
@@ -74,16 +77,20 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFi
   // Write data to file
   mmSsize_t mmpa_ret = mmWrite(fd, const_cast<void *>((const void *)model_char), len);
   if (mmpa_ret == EN_ERROR || mmpa_ret == EN_INVALID_PARAM) {
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
     ErrorManager::GetInstance().ATCReportErrMessage(
-      "E19004", {"file", "errmsg"}, {file_path, strerror(errno)});
+      "E19004", {"file", "errmsg"}, {file_path, err_msg});
     // Need to both print the error info of mmWrite and mmClose, so return ret after mmClose
-    GELOGE(FAILED, "[WriteTo][File] %s failed. errno = %ld, %s", file_path, mmpa_ret, strerror(errno));
+    GELOGE(FAILED, "[WriteTo][File] %s failed. errno = %ld, %s", file_path, mmpa_ret, err_msg);
     ret = FAILED;
   }
   // Close file
   if (mmClose(fd) != EN_OK) {
-    REPORT_INNER_ERROR("E19999", "close file:%s failed. errmsg:%s", file_path, strerror(errno));
-    GELOGE(FAILED, "[Close][File] %s failed. errmsg:%s", file_path, strerror(errno));
+    char_t err_buf[kMaxErrStrLen + 1U] = {};
+    const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
+    REPORT_INNER_ERROR("E19999", "close file:%s failed. errmsg:%s", file_path, err_msg);
+    GELOGE(FAILED, "[Close][File] %s failed. errmsg:%s", file_path, err_msg);
     ret = FAILED;
   }
   return ret;
```
```diff
@@ -137,11 +144,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
       int32_t ret = mmMkdir(tmp_dir_path, S_IRUSR | S_IWUSR | S_IXUSR);  // 700
       if (ret != 0) {
         if (errno != EEXIST) {
+          char_t err_buf[kMaxErrStrLen + 1U] = {};
+          const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
           REPORT_CALL_ERROR("E19999",
                             "Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
-                            directory_path.c_str(), strerror(errno));
+                            directory_path.c_str(), err_msg);
           GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
-                 directory_path.c_str(), strerror(errno));
+                 directory_path.c_str(), err_msg);
           return ret;
         }
       }
@@ -151,11 +160,13 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int ModelSaver::CreateDirectory
   int32_t ret = mmMkdir(const_cast<char *>(directory_path.c_str()), S_IRUSR | S_IWUSR | S_IXUSR);  // 700
   if (ret != 0) {
     if (errno != EEXIST) {
+      char_t err_buf[kMaxErrStrLen + 1U] = {};
+      const auto err_msg = mmGetErrorFormatMessage(mmGetErrorCode(), &err_buf[0], kMaxErrStrLen);
      REPORT_CALL_ERROR("E19999",
                        "Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
-                       directory_path.c_str(), strerror(errno));
+                       directory_path.c_str(), err_msg);
      GELOGW("Can not create directory %s. Make sure the directory exists and writable. errmsg:%s",
-            directory_path.c_str(), strerror(errno));
+            directory_path.c_str(), err_msg);
      return ret;
    }
  }
```
```diff
@@ -23,11 +23,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator::ArgOpOperator()
 ArgOpOperator::~ArgOpOperator() {}

-FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator &ArgOpOperator::Name(const std::string &name) {
-  (void)ParserOperator::Name(name);
-  return *this;
-}
-
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ArgOpOperator &ArgOpOperator::Index(int64_t index) {
   Attr("index", static_cast<int64_t>(index));
@@ -25,8 +25,6 @@ class ArgOpOperator : public ParserOperator {
   ~ArgOpOperator() override;

-  ArgOpOperator &Name(const std::string &name);
-
   ArgOpOperator &Index(int64_t index);

   int64_t GetIndex() const;
@@ -25,11 +25,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator::ConstantOpera
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator::~ConstantOperator() {}

-FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator &ConstantOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ConstantOperator &ConstantOperator::VectorAttr(
     std::string key, std::vector<int64_t> &value) {
   Attr(key, value);
@@ -26,7 +26,6 @@ class ConstantOperator : public ParserOperator {
   ConstantOperator();
   ~ConstantOperator() override;

-  ConstantOperator &Name(const std::string &name);
   ConstantOperator &VectorAttr(std::string key, std::vector<int64_t> &value);
   ConstantOperator &DType(ge::DataType t);
@@ -24,12 +24,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator::FrameworkO
 FrameworkOpOperator::~FrameworkOpOperator() {}

-FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator &FrameworkOpOperator::Name(
-    const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY FrameworkOpOperator &FrameworkOpOperator::Index(int64_t index) {
   Attr(RETVAL_ATTR_NAME_INDEX, static_cast<int64_t>(index));
   return *this;
@@ -26,8 +26,6 @@ class FrameworkOpOperator : public ParserOperator {
   ~FrameworkOpOperator() override;

-  FrameworkOpOperator &Name(const std::string &name);
-
   FrameworkOpOperator &OriginalType(const std::string &type);

   FrameworkOpOperator &NodeDefPkg(const std::string &nodedef_pkg);
@@ -22,9 +22,4 @@ namespace ge {
 FMK_FUNC_HOST_VISIBILITY NoOpOperator::NoOpOperator() : ParserOperator("NoOp") {}

 FMK_FUNC_HOST_VISIBILITY NoOpOperator::~NoOpOperator() {}
-
-FMK_FUNC_HOST_VISIBILITY NoOpOperator &NoOpOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
 }  // namespace ge
@@ -25,8 +25,6 @@ class NoOpOperator : public ParserOperator {
  public:
   NoOpOperator();
   ~NoOpOperator() override;
-
-  NoOpOperator &Name(const std::string &name);
 };
 }  // namespace ge
```
```diff
@@ -45,7 +45,7 @@ class FMK_FUNC_HOST_VISIBILITY ParserOperator {
   ParserOperator &AttrVector(std::string key, std::vector<int32_t> &value);
   ParserOperator &AttrVector(std::string key, std::vector<int64_t> &value);
-  ParserOperator &Name(const std::string &name);
+  virtual ParserOperator &Name(const std::string &name);
   ParserOperator &Type(const std::string &type);
```
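The surrounding hunks delete the per-operator `Name(const std::string &)` wrappers (ArgOpOperator, ConstantOperator, FrameworkOpOperator, NoOpOperator above, and the RefSwitch/ShapeN/VarIsInitialized/Variable ones below); each of them only forwarded to the base class and returned `*this`. With the base declaration made `virtual` here, the duplicated forwarders can go, and a subclass that really needs different behavior can still override. A minimal sketch of the resulting shape (class names are illustrative, not the parser's):

```cpp
#include <string>

// After the change: one virtual setter on the base, no per-subclass copies.
class ParserOperatorSketch {
 public:
  virtual ~ParserOperatorSketch() = default;
  virtual ParserOperatorSketch &Name(const std::string &name) {
    name_ = name;
    return *this;  // keeps the fluent builder style the operators rely on
  }

 private:
  std::string name_;
};

// Subclasses simply inherit Name(); the removed wrappers added nothing.
class VariableOperatorSketch : public ParserOperatorSketch {};
```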
```diff
@@ -22,11 +22,6 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator::RefSwitchOpe
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator::~RefSwitchOperator() {}

-FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator &RefSwitchOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY RefSwitchOperator &RefSwitchOperator::T(ge::DataType t) {
   Attr("T", (int64_t)t);
   return *this;
@@ -25,8 +25,6 @@ class RefSwitchOperator : public ParserOperator {
  public:
   RefSwitchOperator();
   ~RefSwitchOperator() override;

-  RefSwitchOperator &Name(const std::string &name);
-
   RefSwitchOperator &T(ge::DataType t);
 };
 }  // namespace ge
@@ -24,11 +24,6 @@ FMK_FUNC_HOST_VISIBILITY ShapeNOperator::ShapeNOperator() : ParserOperator("Shap
 FMK_FUNC_HOST_VISIBILITY ShapeNOperator::~ShapeNOperator() {}

-FMK_FUNC_HOST_VISIBILITY ShapeNOperator &ShapeNOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 FMK_FUNC_HOST_VISIBILITY ShapeNOperator &ShapeNOperator::N(int64_t n) {
   Attr(SHAPEN_ATTR_N, n);
   return *this;
@@ -26,8 +26,6 @@ class ShapeNOperator : public ParserOperator {
   ShapeNOperator();
   ~ShapeNOperator() override;

-  ShapeNOperator &Name(const std::string &name);
-
   ShapeNOperator &N(int64_t n);
   int64_t GetN() const;

   ShapeNOperator &InType(ge::DataType t);
@@ -24,11 +24,6 @@ VarIsInitializedOpOperator::VarIsInitializedOpOperator() : ParserOperator(ge::pa
 VarIsInitializedOpOperator::~VarIsInitializedOpOperator() {}

-VarIsInitializedOpOperator &VarIsInitializedOpOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 VarIsInitializedOpOperator &VarIsInitializedOpOperator::VectorAttr(const std::string &key,
                                                                    std::vector<int64_t> &value) {
   Attr(key, value);
@@ -26,7 +26,6 @@ class VarIsInitializedOpOperator : public ParserOperator {
   VarIsInitializedOpOperator();
   ~VarIsInitializedOpOperator() override;

-  VarIsInitializedOpOperator &Name(const std::string &name);
   VarIsInitializedOpOperator &VectorAttr(const std::string &key, std::vector<int64_t> &value);
 };
 }  // namespace ge
@@ -23,11 +23,6 @@ VariableOperator::VariableOperator() : ParserOperator(ge::parser::VARIABLE) {}
 VariableOperator::~VariableOperator() {}

-VariableOperator &VariableOperator::Name(const std::string &name) {
-  ParserOperator::Name(name);
-  return *this;
-}
-
 VariableOperator &VariableOperator::Container(const std::string &container) {
   Attr(VAR_ATTR_CONTAINER, container);
   return *this;
@@ -27,8 +27,6 @@ class VariableOperator : public ParserOperator {
   VariableOperator();
   ~VariableOperator() override;

-  VariableOperator &Name(const std::string &name);
-
   VariableOperator &Container(const std::string &container);

   VariableOperator &SharedName(const std::string &sharedname);
```
```diff
@@ -675,8 +675,7 @@ static uint16_t Fp16Div(uint16_t v_1, uint16_t v_2) {
   uint64_t m_tmp;
   if (e_a > e_b) {
     m_tmp = m_a;
-    uint16_t tmp;
-    tmp = e_a - e_b;
+    uint16_t tmp = e_a - e_b;
     for (int i = 0; i < tmp; i++) {
       m_tmp = m_tmp << 1;
     }
@@ -690,8 +689,7 @@ static uint16_t Fp16Div(uint16_t v_1, uint16_t v_2) {
     m_b = m_tmp;
   }
   m_div = static_cast<float>(m_a * 1.0f / m_b);
-  fp16_t fp_div;
-  fp_div = m_div;
+  fp16_t fp_div = m_div;
   ret = fp_div.val;
   if (s_a != s_b) {
     ret |= kFp16SignMask;
```
```diff
@@ -212,8 +212,7 @@ Status ParserUtils::HandleInputContext(const NodePtr &node,
   // add control edge
   if (node->GetInControlAnchor() != nullptr) {
     for (const auto &out_anchor : node->GetInControlAnchor()->GetPeerAnchors()) {
-      graphStatus ret = GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor());
-      if (ret != GRAPH_SUCCESS) {
+      if (GraphUtils::AddEdge(out_anchor, peer_in_anchor->GetOwnerNode()->GetInControlAnchor()) != GRAPH_SUCCESS) {
        REPORT_CALL_ERROR("E19999", "add control edge from %s to %s failed.",
                          out_anchor->GetOwnerNode()->GetName().c_str(),
                          peer_in_anchor->GetOwnerNode()->GetName().c_str());
```
```diff
@@ -30,6 +30,7 @@
 #include <memory>
 #include <string>
+#include "external/ge/ge_api_types.h"
 #include "common/util/error_manager/error_manager.h"
 #include "framework/common/debug/ge_log.h"
 #include "framework/common/string_util.h"
@@ -40,10 +41,6 @@
 namespace ge {
 std::map<string, string> TBEPluginLoader::options_ = {};

-namespace {
-const std::string FRAMEWORK_TYPE = "ge.frameworkType";
-}
-
 // Get Singleton Instance
 FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY TBEPluginLoader &TBEPluginLoader::Instance() {
   static TBEPluginLoader instance_ptr_;
```
```diff
@@ -20,6 +20,8 @@
 #include "framework/common/debug/ge_log.h"
 #include "parser/common/op_parser_factory.h"
 #include "register/op_registry.h"
+#include "parser/common/parser_utils.h"
+#include "graph/def_types.h"

 using domi::ONNX;
 using domi::ParseParamByOpFunc;
@@ -28,7 +30,7 @@ using domi::ParseParamFunc;
 namespace ge {
 Status OnnxCustomParserAdapter::ParseParams(const Message *op_src, ge::Operator &op_dest) {
   GE_CHECK_NOTNULL(op_src);
-  const ge::onnx::NodeProto *node_src = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
+  const ge::onnx::NodeProto *node_src = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
   GE_CHECK_NOTNULL(node_src);
   GELOGI("Onnx op node name = %s, op type= %s, parse params.", node_src->name().c_str(), node_src->op_type().c_str());
```
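This hunk and the ONNX/TensorFlow ones below replace bare `reinterpret_cast` between protobuf message types with `PtrToPtr<From, To>()` from the newly included `graph/def_types.h`, so the pointer reinterpretation happens through one named, greppable utility. A rough stand-in for what such a helper does (an assumption for illustration, not the metadef definition):

```cpp
// Illustrative stand-in for a PtrToPtr-style helper: a single named wrapper
// around the pointer reinterpretation instead of casts scattered per call site.
template <typename Src, typename Dst>
Dst *PtrToPtrSketch(Src *ptr) {
  return reinterpret_cast<Dst *>(ptr);
}

// Usage mirroring the diff: PtrToPtrSketch<const Message, const NodeProto>(op_src);
```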
```diff
@@ -18,6 +18,7 @@
 #include <unordered_map>
 #include "common/util.h"
 #include "graph/debug/ge_attr_define.h"
+#include "graph/def_types.h"
 #include "parser/common/op_parser_factory.h"
 #include "framework/omg/parser/parser_inner_ctx.h"
 #include "parser/onnx/onnx_util.h"
@@ -28,7 +29,7 @@ using namespace ge::parser;
 namespace ge {
 Status OnnxDataParser::ParseParams(const Message *op_src, ge::Operator &op_def) {
   GE_CHECK_NOTNULL(op_src);
-  const ge::onnx::NodeProto *node_src = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
+  const ge::onnx::NodeProto *node_src = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
   GE_CHECK_NOTNULL(node_src);
   GELOGD("Onnx op node name = %s, op type= %s, parse params", node_src->name().c_str(), node_src->op_type().c_str());
   if (ParseInputFromModel(op_src, op_def) != SUCCESS) {
@@ -72,7 +73,7 @@ int64_t OnnxDataParser::ParseInputTensor(const ge::onnx::AttributeProto &attribu
 Status OnnxDataParser::ParseInputFromModel(const Message *op_src, ge::Operator &op_def) {
   GE_CHECK_NOTNULL(op_src);
-  const ge::onnx::NodeProto *node = reinterpret_cast<const ge::onnx::NodeProto *>(op_src);
+  const ge::onnx::NodeProto *node = PtrToPtr<const Message, const ge::onnx::NodeProto>(op_src);
   GE_CHECK_NOTNULL(node);

   // Get attr t:'input_tensor' form NodeProto
@@ -767,8 +767,8 @@ Status OnnxModelParser::AdaptAndFindAllOnnxGraph(ge::onnx::GraphProto &root_onnx
       return FAILED;
     }

-    for (const auto &onnx_graph : onnx_graphs) {
-      onnx_graph_tasks.push(onnx_graph);
+    for (const auto &sub_onnx_graph : onnx_graphs) {
+      onnx_graph_tasks.push(sub_onnx_graph);
     }
     for (const auto &itr : name_to_onnx_subgraph) {
       name_to_onnx_graph.emplace(itr.first, itr.second);
```
```diff
@@ -50,6 +50,9 @@ class PARSER_FUNC_VISIBILITY SubgraphAdapter {
   virtual domi::Status AdaptAndFindAllSubgraphs(ge::onnx::NodeProto *parent_op,
                                                 std::vector<ge::onnx::GraphProto *> &onnx_graphs,
                                                 std::map<std::string, ge::onnx::GraphProto *> &name_to_onnx_graph) {
+    (void)parent_op;
+    (void)onnx_graphs;
+    (void)name_to_onnx_graph;
     return domi::SUCCESS;
   }
 };
```
```diff
@@ -32,6 +32,7 @@
 #endif
 #include <map>
+#include <memory>
 #include <functional>
 #include "subgraph_adapter.h"
@@ -19,6 +19,7 @@
 #include "framework/omg/parser/parser_types.h"
 #include "common/util.h"
 #include "framework/common/debug/ge_log.h"
+#include "graph/def_types.h"
 #include "parser/common/op_parser_factory.h"
 #include "register/op_registry.h"
 #include "register/register.h"
@@ -42,7 +43,7 @@ Status TensorFlowAutoMappingParserAdapter::ParseParams(const Message *op_src, ge
     GELOGE(PARAM_INVALID, "Op src is null");
     return PARAM_INVALID;
   }
-  const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
+  const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
   GELOGD("TF op node name = %s, op type= %s, parse params", node->name().c_str(), node->op().c_str());
   if (op_dest == nullptr) {
     REPORT_INNER_ERROR("E19999", "Param op_dest is nullptr, check invalid");
@@ -31,7 +31,7 @@ Status TensorFlowEnterParser::ParseParams(const Message *op_src, ge::OpDescPtr &
   GE_CHECK_NOTNULL(op_desc);
   const std::string name = op_desc->GetName();

-  const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
+  const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
   domi::tensorflow::AttrValue str_attr;
   if (!TensorFlowUtil::FindAttrValue(node, ENTER_ATTR_FRAME_NAME, str_attr)) {
     REPORT_CALL_ERROR("E19999", "In NodeDef:%s attr:%s not exist, check invalid",
@@ -21,6 +21,7 @@
 #include "graph/debug/ge_attr_define.h"
 #include "parser/common/op_parser_factory.h"
 #include "framework/omg/parser/parser_types.h"
+#include "graph/def_types.h"

 using domi::TENSORFLOW;
 using ge::parser::MERGE;
@@ -30,7 +31,7 @@ Status TensorFlowMergeParser::ParseParams(const Message *op_src, ge::OpDescPtr &
   GE_CHECK_NOTNULL(op_src);
   GE_CHECK_NOTNULL(op_desc);

-  const NodeDef *node = reinterpret_cast<const NodeDef *>(op_src);
+  const NodeDef *node = PtrToPtr<const Message, const NodeDef>(op_src);
   domi::tensorflow::AttrValue attr_num;
   if (!(TensorFlowUtil::FindAttrValue(node, ATTR_NAME_N, attr_num))) {
```
| GELOGW("In NodeDef %s dynamic attr [%s] is not exist.", op_desc->GetName().c_str(), ATTR_NAME_N.c_str()); | GELOGW("In NodeDef %s dynamic attr [%s] is not exist.", op_desc->GetName().c_str(), ATTR_NAME_N.c_str()); | ||||
| @@ -1515,7 +1515,7 @@ Status TensorFlowModelParser::ParseAllGraph(const google::protobuf::Message *pro | |||||
| if (tensorflow_op_map.find(node_op) == tensorflow_op_map.end()) { | if (tensorflow_op_map.find(node_op) == tensorflow_op_map.end()) { | ||||
| GELOGW("%s not found in tensorflow_op_map.", node_op.c_str()); | GELOGW("%s not found in tensorflow_op_map.", node_op.c_str()); | ||||
| } | } | ||||
| Status ret = AddNode(node_def, graph, scope_graph); | |||||
| ret = AddNode(node_def, graph, scope_graph); | |||||
| if (ret != SUCCESS) { | if (ret != SUCCESS) { | ||||
| GELOGE(ret, "Add op[%s] failed", node_def->name().c_str()); | GELOGE(ret, "Add op[%s] failed", node_def->name().c_str()); | ||||
| DeleteFuisonNodeDef(); | DeleteFuisonNodeDef(); | ||||
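Here, and again in `ParseProto` below, the second `Status ret` declaration is dropped so the call reuses the `ret` already declared earlier in the function, presumably to clear a shadowed-variable finding. A compressed sketch of the intended shape, with a hypothetical `AddNode`:

```cpp
#include <cstdint>

using Status = std::uint32_t;
constexpr Status SUCCESS = 0U;
constexpr Status FAILED = 1U;

Status AddNode(int id) { return (id >= 0) ? SUCCESS : FAILED; }  // hypothetical helper

Status ParseAll(const int *ids, int count) {
  Status ret = SUCCESS;      // declared once, earlier in the function
  for (int i = 0; i < count; ++i) {
    ret = AddNode(ids[i]);   // reused here; a second "Status ret" at this point
    if (ret != SUCCESS) {    // would shadow the outer variable
      return ret;
    }
  }
  return ret;
}
```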
| @@ -1675,7 +1675,6 @@ Status TensorFlowModelParser::CheckInputNodeName(const string &input_node_name, | |||||
| } | } | ||||
| } | } | ||||
| int32_t tmp_index = 0; | |||||
| auto find = tmp_input_node_name.find(":"); | auto find = tmp_input_node_name.find(":"); | ||||
| if (find == string::npos) { | if (find == string::npos) { | ||||
| *node_name = tmp_input_node_name; | *node_name = tmp_input_node_name; | ||||
| @@ -1683,7 +1682,7 @@ Status TensorFlowModelParser::CheckInputNodeName(const string &input_node_name, | |||||
| if (index == nullptr) { | if (index == nullptr) { | ||||
| return SUCCESS; | return SUCCESS; | ||||
| } | } | ||||
| *index = tmp_index; | |||||
| *index = 0; | |||||
| return SUCCESS; | return SUCCESS; | ||||
| } | } | ||||
| @@ -1818,17 +1817,20 @@ Status TensorFlowModelParser::GetInPutIndex(shared_ptr<ge::ScopeGraph> &scope_gr | |||||
| auto &impl = scope_graph->impl_; | auto &impl = scope_graph->impl_; | ||||
| return impl->GetInputOrOutputIndex(info, old_index, true, new_index); | return impl->GetInputOrOutputIndex(info, old_index, true, new_index); | ||||
| } | } | ||||
| return SUCCESS; | |||||
| GELOGE(INTERNAL_ERROR, "Fusion op should come from scope fusion pass, node name:%s, fusion node name:%s", | |||||
| info.node_name.c_str(), info.fusion_node_name.c_str()); | |||||
| return INTERNAL_ERROR; | |||||
| } | } | ||||
| Status TensorFlowModelParser::GetOutPutIndex(shared_ptr<ge::ScopeGraph> &scope_graph, const ge::ScopeFusionOpInfo &info, | Status TensorFlowModelParser::GetOutPutIndex(shared_ptr<ge::ScopeGraph> &scope_graph, const ge::ScopeFusionOpInfo &info, | ||||
| const int32_t old_index, int32_t &new_index) { | const int32_t old_index, int32_t &new_index) { | ||||
| GE_CHECK_NOTNULL(scope_graph); | GE_CHECK_NOTNULL(scope_graph); | ||||
| Status ret; | |||||
| if (info.scope_pass) { | if (info.scope_pass) { | ||||
| auto &impl = scope_graph->impl_; | auto &impl = scope_graph->impl_; | ||||
| ret = impl->GetInputOrOutputIndex(info, old_index, false, new_index); | |||||
| return impl->GetInputOrOutputIndex(info, old_index, false, new_index); | |||||
| } | } | ||||
| return ret; | |||||
| GELOGE(INTERNAL_ERROR, "Fusion op should come from scope fusion pass, node name:%s, fusion node name:%s", | |||||
| info.node_name.c_str(), info.fusion_node_name.c_str()); | |||||
| return INTERNAL_ERROR; | |||||
| } | } | ||||
| bool TensorFlowModelParser::ConstOpNeedUpdate(const string &op_name) { | bool TensorFlowModelParser::ConstOpNeedUpdate(const string &op_name) { | ||||
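These two hunks turn the non-scope-pass branch into an explicit error: `GetInPutIndex` used to return `SUCCESS` without ever writing `new_index`, and `GetOutPutIndex` returned an uninitialized `ret`. Both now log and return `INTERNAL_ERROR`. Sketched with simplified types (the error code value below is a placeholder):

```cpp
#include <cstdint>
#include <cstdio>

using Status = std::uint32_t;
constexpr Status SUCCESS = 0U;
constexpr Status INTERNAL_ERROR = 1U;    // placeholder value, not the real code

struct FusionInfo { bool scope_pass; };  // trimmed-down stand-in

Status GetIndex(const FusionInfo &info, std::int32_t old_index, std::int32_t &new_index) {
  if (info.scope_pass) {
    new_index = old_index;  // the real code delegates to the scope graph impl
    return SUCCESS;
  }
  // Callers only pass fusion ops produced by a scope fusion pass, so falling
  // through here is a programming error and must not look like success.
  std::fprintf(stderr, "Fusion op should come from scope fusion pass\n");
  return INTERNAL_ERROR;
}
```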
| @@ -2007,15 +2009,13 @@ Status TensorFlowModelParser::EraseNormalOpOutputIfChild(shared_ptr<ge::ScopeGra | |||||
| for (auto iter = normal_op_node_context.output_map.begin(); iter != normal_op_node_context.output_map.end();) { | for (auto iter = normal_op_node_context.output_map.begin(); iter != normal_op_node_context.output_map.end();) { | ||||
| string output_node_name = iter->first; | string output_node_name = iter->first; | ||||
| ge::ScopeFusionOpInfo to_info; | ge::ScopeFusionOpInfo to_info; | ||||
| int32_t from_index = 0; | |||||
| int32_t to_index = 0; | |||||
| if (IsFusionOpChild(output_node_name, &to_info) && | if (IsFusionOpChild(output_node_name, &to_info) && | ||||
| nodedef_map_[output_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) { | nodedef_map_[output_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) { | ||||
| // Fuse operator, update index | // Fuse operator, update index | ||||
| std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second; | std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second; | ||||
| int32_t to_index = 0; | |||||
| for (auto &pair : pairs) { | for (auto &pair : pairs) { | ||||
| from_index = pair.first; | |||||
| int32_t from_index = pair.first; | |||||
| GE_RETURN_WITH_LOG_IF_ERROR(GetInPutIndex(scope_graph, to_info, pair.second, to_index), | GE_RETURN_WITH_LOG_IF_ERROR(GetInPutIndex(scope_graph, to_info, pair.second, to_index), | ||||
| "GetInPutIndex failed ,output_node_name %s.", output_node_name.c_str()); | "GetInPutIndex failed ,output_node_name %s.", output_node_name.c_str()); | ||||
| tmp_output_map[to_info.fusion_node_name].push_back({from_index, to_index}); | tmp_output_map[to_info.fusion_node_name].push_back({from_index, to_index}); | ||||
| @@ -2044,15 +2044,13 @@ Status TensorFlowModelParser::UpdateNormalOpContext(shared_ptr<ge::ScopeGraph> & | |||||
| for (auto iter = normal_op_node_context.input_map.begin(); iter != normal_op_node_context.input_map.end();) { | for (auto iter = normal_op_node_context.input_map.begin(); iter != normal_op_node_context.input_map.end();) { | ||||
| string input_node_name = iter->first; | string input_node_name = iter->first; | ||||
| ge::ScopeFusionOpInfo from_info; | ge::ScopeFusionOpInfo from_info; | ||||
| int32_t from_index = 0; | |||||
| int32_t to_index = 0; | |||||
| if (IsFusionOpChild(input_node_name, &from_info) && | if (IsFusionOpChild(input_node_name, &from_info) && | ||||
| nodedef_map_[input_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) { | nodedef_map_[input_node_name]->op() != TENSORFLOWF_NODE_OP_CONST) { | ||||
| // Fuse operator, update index | // Fuse operator, update index | ||||
| std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second; | std::vector<std::pair<int32_t, int32_t>> &pairs = iter->second; | ||||
| int32_t from_index = 0; | |||||
| for (auto &pair : pairs) { | for (auto &pair : pairs) { | ||||
| to_index = pair.second; | |||||
| int32_t to_index = pair.second; | |||||
| GE_RETURN_WITH_LOG_IF_ERROR(GetOutPutIndex(scope_graph, from_info, pair.first, from_index), | GE_RETURN_WITH_LOG_IF_ERROR(GetOutPutIndex(scope_graph, from_info, pair.first, from_index), | ||||
| "GetOutPutIndex failed ,input_node_name %s.", input_node_name.c_str()); | "GetOutPutIndex failed ,input_node_name %s.", input_node_name.c_str()); | ||||
| tmp_input_map[from_info.fusion_node_name].push_back({from_index, to_index}); | tmp_input_map[from_info.fusion_node_name].push_back({from_index, to_index}); | ||||
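In both of these loops the per-iteration indices are now declared at their point of first use rather than at function scope, so no stale value survives from one pair to the next. The rewritten shape, reduced to its essentials:

```cpp
#include <cstdint>
#include <utility>
#include <vector>

// Shape of the rewritten loops: the value a helper fills in is declared just
// before the loop, the per-pair value inside it, instead of both persisting
// (and carrying stale state) across the whole function.
void RemapPairs(const std::vector<std::pair<std::int32_t, std::int32_t>> &pairs,
                std::vector<std::pair<std::int32_t, std::int32_t>> &remapped) {
  std::int32_t to_index = 0;
  for (const auto &pair : pairs) {
    std::int32_t from_index = pair.first;
    to_index = pair.second;  // in the real code a Get*PutIndex call writes this
    remapped.push_back({from_index, to_index});
  }
}
```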
| @@ -2274,7 +2272,7 @@ Status TensorFlowModelParser::ParseProto(const google::protobuf::Message *proto, | |||||
| } | } | ||||
| // Do not exit immediately when there is an error, wait until all errors are collected before exiting | // Do not exit immediately when there is an error, wait until all errors are collected before exiting | ||||
| Status ret = AddFmkNodeDefToMap(node_def, op_node_name_list); | |||||
| ret = AddFmkNodeDefToMap(node_def, op_node_name_list); | |||||
| GE_CHK_STATUS_EXEC(ret, return PARAM_INVALID, "add node_def to map failed"); | GE_CHK_STATUS_EXEC(ret, return PARAM_INVALID, "add node_def to map failed"); | ||||
| } | } | ||||
| PARSER_TIMESTAMP_END(AddFmkNodeDefToMap, "TensorFlowModelParser::AddFmkNodeDefToMap"); | PARSER_TIMESTAMP_END(AddFmkNodeDefToMap, "TensorFlowModelParser::AddFmkNodeDefToMap"); | ||||
| @@ -2563,6 +2561,7 @@ Status TensorFlowModelParser::OptimizeSnapShot(domi::tensorflow::NodeDef *curr_m | |||||
| domi::tensorflow::NodeDef *output_node_def = nodedef_map[output_node_name]; | domi::tensorflow::NodeDef *output_node_def = nodedef_map[output_node_name]; | ||||
| GE_CHECK_NOTNULL(output_node_def); | GE_CHECK_NOTNULL(output_node_def); | ||||
| auto inputs = output_node_def->mutable_input(); | auto inputs = output_node_def->mutable_input(); | ||||
| std::vector<std::string> added_inputs; | |||||
| for (auto &input : *inputs) { | for (auto &input : *inputs) { | ||||
| string node_name; | string node_name; | ||||
| bool is_control = false; | bool is_control = false; | ||||
| @@ -2596,12 +2595,15 @@ Status TensorFlowModelParser::OptimizeSnapShot(domi::tensorflow::NodeDef *curr_m | |||||
| } | } | ||||
| } | } | ||||
| if (!is_exist_input) { | if (!is_exist_input) { | ||||
| output_node_def->add_input("^" + item); | |||||
| GELOGD("Optimize Snapshot node, dest:%s, set control input:%s.", output_node_name.c_str(), item.c_str()); | |||||
| added_inputs.push_back("^" + item); | |||||
| } | } | ||||
| } | } | ||||
| } | } | ||||
| } | } | ||||
| for (const std::string &added_input : added_inputs) { | |||||
| GELOGD("Optimize Snapshot node, dest:%s, set control input:%s.", output_node_name.c_str(), added_input.c_str()); | |||||
| output_node_def->add_input(added_input); | |||||
| } | |||||
| } | } | ||||
| // Clear the inputs of the snapshot node so it becomes an isolated node | // Clear the inputs of the snapshot node so it becomes an isolated node | ||||
| curr_mode_def->clear_input(); | curr_mode_def->clear_input(); | ||||
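The substantive change in `OptimizeSnapShot`: the old code called `output_node_def->add_input(...)` while range-iterating `output_node_def->mutable_input()`, and growing a protobuf repeated field can reallocate its storage and invalidate the iterators in use. The rewrite collects the new control inputs in `added_inputs` and appends them only after the loop finishes. A reduced sketch of the same pattern, using a std::vector in place of the repeated field:

```cpp
#include <string>
#include <vector>

// Collect first, append only after iteration over the container has ended.
void RewireControlInputs(std::vector<std::string> &inputs,
                         const std::string &control_source) {
  std::vector<std::string> added_inputs;
  for (const std::string &input : inputs) {  // read-only pass, no appends here
    if (input == "Snapshot" || input == "Snapshot:0") {
      added_inputs.push_back("^" + control_source);
    }
  }
  for (const std::string &added_input : added_inputs) {
    inputs.push_back(added_input);           // mutate only after the loop
  }
}
```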
| @@ -3128,8 +3130,7 @@ Status TensorFlowModelParser::TrimGraphByInput(const domi::tensorflow::GraphDef | |||||
| output_graph_def->Clear(); | output_graph_def->Clear(); | ||||
| for (const NodeDef &node : filtered_graph_def.node()) { | for (const NodeDef &node : filtered_graph_def.node()) { | ||||
| if (input_nodes.count(node.name())) { | if (input_nodes.count(node.name())) { | ||||
| NodeDef placeholder_node; | |||||
| placeholder_node = node; | |||||
| NodeDef placeholder_node = node; | |||||
| placeholder_node.clear_input(); | placeholder_node.clear_input(); | ||||
| GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder")); | GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder")); | ||||
| domi::tensorflow::AttrValue attr_value; | domi::tensorflow::AttrValue attr_value; | ||||
| @@ -3202,8 +3203,7 @@ Status TensorFlowModelParser::TrimGraphByOutput(const domi::tensorflow::GraphDef | |||||
| output_graph_def->Clear(); | output_graph_def->Clear(); | ||||
| for (const NodeDef &node : filtered_graph_def.node()) { | for (const NodeDef &node : filtered_graph_def.node()) { | ||||
| if (input_nodes.count(node.name())) { | if (input_nodes.count(node.name())) { | ||||
| NodeDef placeholder_node; | |||||
| placeholder_node = node; | |||||
| NodeDef placeholder_node = node; | |||||
| placeholder_node.clear_input(); | placeholder_node.clear_input(); | ||||
| GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder")); | GE_IF_BOOL_EXEC(node.op() != "Placeholder", placeholder_node.set_op("Placeholder")); | ||||
| domi::tensorflow::AttrValue attr_value; | domi::tensorflow::AttrValue attr_value; | ||||
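In both `TrimGraphByInput` and `TrimGraphByOutput`, default-constructing `placeholder_node` and then copy-assigning `node` into it is collapsed into direct copy-initialization, which says the same thing in one statement and skips the redundant default construction. In isolation, with a trimmed stand-in type:

```cpp
#include <string>

struct NodeDef { std::string name; std::string op; };  // trimmed stand-in

NodeDef MakePlaceholder(const NodeDef &node) {
  NodeDef placeholder_node = node;      // one copy construction, no separate
  placeholder_node.op = "Placeholder";  // default-construct-then-assign step
  return placeholder_node;
}
```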
| @@ -3725,8 +3725,8 @@ void TensorFlowModelParser::UpdateInnerInputMap(const string &fusion_op_name, Op | |||||
| std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_input_map; | std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_input_map; | ||||
| for (auto iter = op_node_context.input_map.begin(); iter != op_node_context.input_map.end();) { | for (auto iter = op_node_context.input_map.begin(); iter != op_node_context.input_map.end();) { | ||||
| string src_name = iter->first; | string src_name = iter->first; | ||||
| std::vector<std::pair<int32_t, int32_t>> &input_idx = iter->second; | |||||
| if (src_name == ge::kInputFromFusionScope) { | if (src_name == ge::kInputFromFusionScope) { | ||||
| std::vector<std::pair<int32_t, int32_t>> &input_idx = iter->second; | |||||
| for (const auto &in_pair : input_idx) { | for (const auto &in_pair : input_idx) { | ||||
| if (in_pair.second != kControlSlot) { | if (in_pair.second != kControlSlot) { | ||||
| auto data = remap_data_input[fusion_op_name + std::to_string(in_pair.first)]; | auto data = remap_data_input[fusion_op_name + std::to_string(in_pair.first)]; | ||||
| @@ -3772,8 +3772,8 @@ void TensorFlowModelParser::UpdateInnerOutputMap(const string &fusion_op_name, O | |||||
| std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_output_map; | std::map<std::string, std::vector<std::pair<int32_t, int32_t>>> tmp_output_map; | ||||
| for (auto iter = op_node_context.output_map.begin(); iter != op_node_context.output_map.end();) { | for (auto iter = op_node_context.output_map.begin(); iter != op_node_context.output_map.end();) { | ||||
| string dst_name = iter->first; | string dst_name = iter->first; | ||||
| std::vector<std::pair<int32_t, int32_t>> &output_idx = iter->second; | |||||
| if (dst_name == ge::kOutputToFusionScope) { | if (dst_name == ge::kOutputToFusionScope) { | ||||
| std::vector<std::pair<int32_t, int32_t>> &output_idx = iter->second; | |||||
| for (const auto &out_pair : output_idx) { | for (const auto &out_pair : output_idx) { | ||||
| if (out_pair.second != kControlSlot) { | if (out_pair.second != kControlSlot) { | ||||
| auto data_outputs = remap_data_output[fusion_op_name + std::to_string(out_pair.second)]; | auto data_outputs = remap_data_output[fusion_op_name + std::to_string(out_pair.second)]; | ||||
| @@ -33,9 +33,6 @@ Status TensorFlowReshapeParser::ParseDesc(const domi::tensorflow::AttrValue &att | |||||
| GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), PARAM_INVALID, | GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), PARAM_INVALID, | ||||
| "parse ge_desc failed."); | "parse ge_desc failed."); | ||||
| uint32_t size_type = 1; | uint32_t size_type = 1; | ||||
| int64_t real_size = 1; | |||||
| int64_t tmp_dim = 0; | |||||
| auto data_type = ge_desc.GetDataType(); | auto data_type = ge_desc.GetDataType(); | ||||
| bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type); | bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type); | ||||
| GE_IF_BOOL_EXEC(!type_ret, | GE_IF_BOOL_EXEC(!type_ret, | ||||
| @@ -45,8 +42,9 @@ Status TensorFlowReshapeParser::ParseDesc(const domi::tensorflow::AttrValue &att | |||||
| ge::TypeUtils::DataTypeToSerialString(data_type).c_str()); | ge::TypeUtils::DataTypeToSerialString(data_type).c_str()); | ||||
| return PARAM_INVALID); | return PARAM_INVALID); | ||||
| // calculate size | // calculate size | ||||
| int64_t real_size = 1; | |||||
| for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | ||||
| tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| int64_t tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;); | GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;); | ||||
| real_size *= tmp_dim; | real_size *= tmp_dim; | ||||
| } | } | ||||
| @@ -155,10 +155,14 @@ Status TensorFlowShapeNParser::ParseParams(const Message *op_src, ge::OpDescPtr | |||||
| // AUTO GEN PLEASE DO NOT MODIFY IT | // AUTO GEN PLEASE DO NOT MODIFY IT | ||||
| Status TensorFlowShapeNParser::PreParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) { | Status TensorFlowShapeNParser::PreParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) { | ||||
| (void)node; | |||||
| (void)op; | |||||
| return SUCCESS; | return SUCCESS; | ||||
| } | } | ||||
| Status TensorFlowShapeNParser::PostParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) { | Status TensorFlowShapeNParser::PostParseParams(const domi::tensorflow::NodeDef *node, ShapeNOperator *op) { | ||||
| (void)node; | |||||
| (void)op; | |||||
| return SUCCESS; | return SUCCESS; | ||||
| } | } | ||||
| @@ -37,9 +37,6 @@ Status TensorFlowSqueezeParser::ParseDesc(const domi::tensorflow::AttrValue &att | |||||
| GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), domi::PARAM_INVALID, | GE_CHK_BOOL_RET_STATUS(TensorFlowUtil::ParseFromAttrValueList(ge_desc, a_list, 0, tf_datatype), domi::PARAM_INVALID, | ||||
| "parse ge_desc failed."); | "parse ge_desc failed."); | ||||
| uint32_t size_type; | uint32_t size_type; | ||||
| int64_t real_size = 1; | |||||
| int64_t tmp_dim = 0; | |||||
| auto data_type = ge_desc.GetDataType(); | auto data_type = ge_desc.GetDataType(); | ||||
| bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type); | bool type_ret = ge::TypeUtils::GetDataTypeLength(data_type, size_type); | ||||
| GE_IF_BOOL_EXEC(!type_ret, | GE_IF_BOOL_EXEC(!type_ret, | ||||
| @@ -49,8 +46,9 @@ Status TensorFlowSqueezeParser::ParseDesc(const domi::tensorflow::AttrValue &att | |||||
| ge::TypeUtils::DataTypeToSerialString(data_type).c_str()); | ge::TypeUtils::DataTypeToSerialString(data_type).c_str()); | ||||
| return domi::PARAM_INVALID); | return domi::PARAM_INVALID); | ||||
| // calculate size | // calculate size | ||||
| int64_t real_size = 1; | |||||
| for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | ||||
| tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| int64_t tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;); | GE_IF_BOOL_EXEC(tmp_dim < 0, real_size = tmp_dim * (-1) * real_size; continue;); | ||||
| PARSER_INT64_MULCHECK(real_size, tmp_dim); | PARSER_INT64_MULCHECK(real_size, tmp_dim); | ||||
| real_size *= tmp_dim; | real_size *= tmp_dim; | ||||
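Besides moving `real_size` and `tmp_dim` to their point of first use, this loop (and the identical one in the Reshape parser above) is the descriptor size calculation: unknown negative dimensions contribute their absolute value, and `PARSER_INT64_MULCHECK` guards the running product against int64 overflow. A simplified version of the same idea, with a hypothetical check standing in for the macro:

```cpp
#include <cstdint>
#include <limits>
#include <vector>

// Simplified stand-in for PARSER_INT64_MULCHECK: detect overflow before multiplying.
bool MulWouldOverflow(std::int64_t a, std::int64_t b) {
  return (a != 0) && (b > std::numeric_limits<std::int64_t>::max() / a);
}

// Element count of a shape, counting unknown (negative) dims by absolute value,
// roughly mirroring the real_size accumulation in the parsers.
std::int64_t ElementCount(const std::vector<std::int64_t> &dims) {
  std::int64_t real_size = 1;
  for (std::int64_t tmp_dim : dims) {
    if (tmp_dim < 0) {
      tmp_dim = -tmp_dim;
    }
    if (MulWouldOverflow(real_size, tmp_dim)) {
      return -1;  // the real code bails out of parsing instead
    }
    real_size *= tmp_dim;
  }
  return real_size;
}
```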
| @@ -267,13 +267,12 @@ FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY domi::Status TensorFlowUtil::Tr | |||||
| GE_CHK_BOOL_RET_STATUS(ParseFromAttrValueList(ge_desc, a_list, i, tf_datatype), PARAM_INVALID, | GE_CHK_BOOL_RET_STATUS(ParseFromAttrValueList(ge_desc, a_list, i, tf_datatype), PARAM_INVALID, | ||||
| "parse ge_desc failed."); | "parse ge_desc failed."); | ||||
| uint32_t size_type = 1; | uint32_t size_type = 1; | ||||
| int64_t tmp_dim = 0; | |||||
| auto data_type = ge_desc.GetDataType(); | auto data_type = ge_desc.GetDataType(); | ||||
| GE_CHK_BOOL_RET_STATUS(ge::TypeUtils::GetDataTypeLength(data_type, size_type), PARAM_INVALID, | GE_CHK_BOOL_RET_STATUS(ge::TypeUtils::GetDataTypeLength(data_type, size_type), PARAM_INVALID, | ||||
| "dataType no define size , parse ge_desc failed."); | "dataType no define size , parse ge_desc failed."); | ||||
| // get size | // get size | ||||
| for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | for (uint32_t j = 0; j < ge_desc.GetShape().GetDimNum(); ++j) { | ||||
| tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| int64_t tmp_dim = ge_desc.GetShape().GetDim(j); | |||||
| // The shape inferred by fusedbatchnormgrad and mean calling tensorflow is not accurate. | // The shape inferred by fusedbatchnormgrad and mean calling tensorflow is not accurate. | ||||
| // Here, special treatment is given to the two operators. | // Here, special treatment is given to the two operators. | ||||
| @@ -98,7 +98,7 @@ void ErrorManager::SetStage(const std::string &first_stage, const std::string &s | |||||
| } | } | ||||
| struct error_message::Context &ErrorManager::GetErrorManagerContext() { | struct error_message::Context &ErrorManager::GetErrorManagerContext() { | ||||
| struct error_message::Context error_context; | |||||
| static struct error_message::Context error_context; | |||||
| return error_context; | return error_context; | ||||
| } | } | ||||
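Making `error_context` a function-local `static` is the real fix in this hunk: `GetErrorManagerContext` returns a reference, and before the change that reference pointed at an object destroyed when the function returned. A sketch of the difference, with a hypothetical context type:

```cpp
struct Context {               // hypothetical stand-in for error_message::Context
  int work_stream_id = 0;
};

// Before the fix the body was `Context ctx; return ctx;` behind a reference
// return type, i.e. every caller received a dangling reference. With a static
// local the object is built once on first use and outlives all callers
// (the usual Meyers-singleton shape).
Context &GetContext() {
  static Context ctx;
  return ctx;
}

int main() {
  GetContext().work_stream_id = 42;
  return (GetContext().work_stream_id == 42) ? 0 : 1;
}
```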
| @@ -2387,5 +2387,23 @@ TEST_F(STestTensorflowParser, tensorflow_GraphDefOptimizeIdentity_test) | |||||
| Status ret = tensorflow_parser.GraphDefOptimizeIdentity(&graph_def, nodedef_map, nodedef_to_optimize); | Status ret = tensorflow_parser.GraphDefOptimizeIdentity(&graph_def, nodedef_map, nodedef_to_optimize); | ||||
| EXPECT_EQ(ret, ge::PARAM_INVALID); | EXPECT_EQ(ret, ge::PARAM_INVALID); | ||||
| } | } | ||||
| TEST_F(STestTensorflowParser, tensorflow_optimizer_snapshot_no_retval_test) { | |||||
| std::string caseDir = __FILE__; | |||||
| std::size_t idx = caseDir.find_last_of("/"); | |||||
| caseDir = caseDir.substr(0, idx); | |||||
| const std::string root_proto = caseDir + "/origin_models/test_snapshot.pb"; | |||||
| domi::tensorflow::GraphDef graphDef; | |||||
| bool protoRet = | |||||
| parser::ReadProtoFromBinaryFile(root_proto.c_str(), &graphDef); | |||||
| ASSERT_EQ(protoRet, true); | |||||
| TensorFlowModelParser tensorflow_parser; | |||||
| ge::ComputeGraphPtr root_graph = | |||||
| ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph"); | |||||
| Status ret = tensorflow_parser.ParseProto( | |||||
| reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph); | |||||
| EXPECT_EQ(FAILED, ret); | |||||
| } | |||||
| } // namespace ge | } // namespace ge | ||||
| @@ -188,4 +188,33 @@ TEST_F(UtestTensorflowParser, tensorflow_parser_with_external_graph) { | |||||
| ret = TensorFlowModelParser::AddExternalGraph(root_graph); | ret = TensorFlowModelParser::AddExternalGraph(root_graph); | ||||
| EXPECT_EQ(ret, INTERNAL_ERROR); | EXPECT_EQ(ret, INTERNAL_ERROR); | ||||
| } | } | ||||
| TEST_F(UtestTensorflowParser, optimize_snapshot) { | |||||
| domi::tensorflow::GraphDef graph_def; | |||||
| auto mul_node = graph_def.add_node(); | |||||
| mul_node->set_name("optimizer/Mul"); | |||||
| mul_node->set_op("Mul"); | |||||
| mul_node->add_input("Snapshot:0"); | |||||
| auto snapshot_node = graph_def.add_node(); | |||||
| snapshot_node->set_name("Snapshot"); | |||||
| snapshot_node->set_op("Snapshot"); | |||||
| snapshot_node->add_input("loss_scale/read:0"); | |||||
| snapshot_node->add_input("^ShuffleNet/AssignMovingAvg"); | |||||
| auto identity_node = graph_def.add_node(); | |||||
| identity_node->set_name("loss_scale/read"); | |||||
| identity_node->set_op("Identity"); | |||||
| identity_node->add_input("loss_scale/ref:0"); | |||||
| auto assign_node = graph_def.add_node(); | |||||
| assign_node->set_name("ShuffleNet/AssignMovingAvg"); | |||||
| assign_node->set_op("AssignSub"); | |||||
| assign_node->add_input("ShuffleNet/moving_mean:0"); | |||||
| Status ret = TensorFlowModelParser().GraphDefOptimize(&graph_def); | |||||
| EXPECT_EQ(ret, ge::SUCCESS); | |||||
| } | |||||
| } // namespace ge | } // namespace ge | ||||
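Together with the ST case above, the new `optimize_snapshot` unit test builds a four-node GraphDef in which the Snapshot node has both a data input and a control input, then runs `GraphDefOptimize` and expects SUCCESS, so it exercises the control-input rewiring that `OptimizeSnapShot` now performs after the iteration loop.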