@@ -1194,4 +1194,55 @@ TEST_F(UtestCaffeParser, CaffeWeightsParser_ReorderInput_test)
  modelParser.ReorderInput(net);
}
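
// Builds a domi.caffe.LayerParameter dynamically (protobuf Importer + DynamicMessageFactory,
// loading caffe.proto from the metadef source tree) and passes the resulting empty message
// to CaffeOpParser::ParseParams, expecting SUCCESS.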
TEST_F(UtestCaffeParser, CaffeOpParser_ParseParms_test)
{
  CaffeOpParser parser;
  std::string case_dir = __FILE__;
  case_dir = case_dir.substr(0, case_dir.find_last_of("/"));
  std::string caffe_proto = case_dir + "/../../../../../metadef/proto/caffe/";
  google::protobuf::compiler::DiskSourceTree sourceTree;
  sourceTree.MapPath("project_root", caffe_proto);
  google::protobuf::compiler::Importer importer(&sourceTree, nullptr);
  importer.Import("project_root/caffe.proto");
  auto descriptor = importer.pool()->FindMessageTypeByName("domi.caffe.LayerParameter");
  ASSERT_NE(descriptor, nullptr);  // fail cleanly if caffe.proto could not be imported
  ge::OpDescPtr op_desc_src = std::make_shared<ge::OpDesc>("Abs", "AbsVal");
  google::protobuf::DynamicMessageFactory factory;
  const google::protobuf::Message *proto = factory.GetPrototype(descriptor);
  const google::protobuf::Message *message = proto->New();
  ge::Operator op_src = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc_src);
  Status ret = parser.ParseParams(message, op_src);
  EXPECT_EQ(ret, SUCCESS);
  delete message;  // New() returns a caller-owned copy of the prototype
}
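
// Covers CaffeModelParser::ParseInput for a NetParameter that declares one named input
// ("111") together with an empty input_shape entry; only the SUCCESS path is checked.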
TEST_F(UtestCaffeParser, CaffeModelParser_Constructor_and_delete)
{
  CaffeModelParser modelParser;
  domi::caffe::NetParameter net;
  net.add_input("111");
  bool input_data_flag = true;
  net.add_input_shape();
  Status ret = modelParser.ParseInput(net, input_data_flag);
  EXPECT_EQ(ret, SUCCESS);
}
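
// Parses caffe_add.pbtxt once through the public aclgrphParseCaffe API, then again from an
// in-memory buffer through CaffeModelParser::ParseFromMemory, and expects SUCCESS.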
TEST_F(UtestCaffeParser, ParseFromMemory_success_graph)
{
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  std::string modelFile = caseDir + "/caffe_model/caffe_add.pbtxt";
  std::string weight_file = caseDir + "/caffe_model/caffe_add.caffemodel";
  const char* tmp_tf_pb_model = modelFile.c_str();
  const char* tmp_tf_weight_model = weight_file.c_str();
  ge::Graph graph;
  Status ret = ge::aclgrphParseCaffe(modelFile.c_str(), weight_file.c_str(), graph);
  CaffeModelParser modelParser;
  MemBuffer* memBuffer1 = ParerUTestsUtils::MemBufferFromFile(tmp_tf_pb_model);
  ASSERT_NE(memBuffer1, nullptr);  // avoid dereferencing a null buffer if the file is missing
  ret = modelParser.ParseFromMemory((char*)memBuffer1->data, memBuffer1->size, graph);
  EXPECT_EQ(ret, SUCCESS);
  delete memBuffer1;
}
} // namespace ge
@@ -344,6 +344,7 @@ TEST_F(UtestAclGraphParser, test_operatoreq)
}

TEST_F(UtestAclGraphParser, test_pre_checker) {
  TBEPluginLoader tbe_plugin;
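  // Presumably resets the cached framework op-type table so the checks below run against an
  // uninitialized PreChecker state (hypothesis based on the member name, not verified here).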
  PreChecker::Instance().fmk_op_types_ = nullptr;
  const char* str = "iiii";
  PreChecker::OpId id = str;
@@ -69,4 +69,32 @@ TEST_F(UtestMessage2Operator, pb2json_one_field_json) {
  Json json;
  ge::Pb2Json::Message2Json(input_node, std::set<std::string>{}, json, true);
}
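
// Same one-field message as above, but converted with an explicit depth argument of 21, which
// is assumed to exceed Pb2Json's recursion-depth guard; the test only checks that the
// conversion does not crash, no JSON content is asserted.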
TEST_F(UtestMessage2Operator, pb2json_one_field_json_depth_max) {
  ge::onnx::NodeProto input_node;
  ge::onnx::AttributeProto *attribute = input_node.add_attribute();
  attribute->set_name("attribute");
  attribute->set_type(onnx::AttributeProto::AttributeType(1));
  ge::onnx::TensorProto *attribute_tensor = attribute->mutable_t();
  attribute_tensor->set_data_type(1);
  attribute_tensor->add_dims(4);
  attribute_tensor->set_raw_data("\007");
  Json json;
  ge::Pb2Json::Message2Json(input_node, std::set<std::string>{}, json, true, 21);
}
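
// Variant of the test above with tensor data_type 3 (INT8 in the standard ONNX enum) to drive
// a different type branch of Message2Json; again only crash-freedom is verified.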
TEST_F(UtestMessage2Operator, pb2json_one_field_json_type) {
  ge::onnx::NodeProto input_node;
  ge::onnx::AttributeProto *attribute = input_node.add_attribute();
  attribute->set_name("attribute");
  attribute->set_type(onnx::AttributeProto::AttributeType(1));
  ge::onnx::TensorProto *attribute_tensor = attribute->mutable_t();
  attribute_tensor->set_data_type(3);
  attribute_tensor->add_dims(4);
  attribute_tensor->set_raw_data("\007");
  Json json;
  ge::Pb2Json::Message2Json(input_node, std::set<std::string>{}, json, true);
}
} // namespace ge
@@ -243,6 +243,7 @@ TEST_F(UtestOnnxParser, OnnxModelParser_ConvertToGeDataType_test)
  EXPECT_EQ(ret, ge::DataType::DT_UNDEFINED);
}

TEST_F(UtestOnnxParser, OnnxModelParser_ParseConvertData_test)
{
  OnnxConstantParser constant_parser;
@@ -278,6 +279,23 @@ TEST_F(UtestOnnxParser, OnnxModelParser_ParseConvertData_test)
  EXPECT_EQ(ret, SUCCESS);
}
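
// Exercises ParseConvertData when the destination TensorDesc is DT_BOOL while the proto
// payload is declared INT32: a 4-byte raw buffer ("Test") is converted as a single element.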
TEST_F(UtestOnnxParser, OnnxModelParser_ParseConvertData_test_bool)
{
  OnnxConstantParser constant_parser;
  ge::onnx::TensorProto tensor_proto;
  tensor_proto.set_data_type(OnnxDataType::INT32);
  ge::Tensor tensor;
  TensorDesc tensor_desc = tensor.GetTensorDesc();
  tensor_desc.SetDataType(ge::DataType::DT_BOOL);
  tensor.SetTensorDesc(tensor_desc);
  int count = 1;
  tensor_proto.set_raw_data("Test");
  Status ret = constant_parser.ParseConvertData(tensor_proto, tensor, count);
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(UtestOnnxParser, OnnxConstantParser_ParseConvertTensor_test)
{
  OnnxConstantParser constant_parser;
@@ -423,4 +441,21 @@ TEST_F(UtestOnnxParser, onnx_test_GetModelFromMemory)
  EXPECT_EQ(ret, FAILED);
}
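
// Builds a minimal ONNX NodeProto (op_type "Add1", domain "add.onnx") and checks that
// OnnxModelParser::TransNodeToOperator copies it onto a pre-created ge::Operator;
// only the SUCCESS status is asserted.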
TEST_F(UtestOnnxParser, onnx_test_TransNodeToOperator_SetTensorData)
{
  ge::onnx::ModelProto model_proto;
  ge::onnx::GraphProto* graph = model_proto.mutable_graph();
  ge::onnx::NodeProto *node_proto = graph->add_node();
  node_proto->set_op_type("Add1");
  node_proto->set_domain("add.onnx");
  node_proto->set_name("Conv2D");
  ge::OpDescPtr op_desc_src = std::make_shared<ge::OpDesc>("Add", "add.onnx");
  ge::Operator op = ge::OpDescUtils::CreateOperatorFromOpDesc(op_desc_src);
  std::string op_type = "Add";
  OnnxModelParser onnx_parser;
  Status ret = onnx_parser.TransNodeToOperator(node_proto, op, op_type);
  EXPECT_EQ(ret, SUCCESS);
}
} // namespace ge
@@ -0,0 +1,174 @@
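# New fixture for the GetNext dynamic-fusion test: an IteratorV2 feeding IteratorGetNext,
# whose output goes to a Shape node and to two _Retval outputs. The parser is expected to
# fuse this subgraph down to three direct nodes (see tensorflow_optimizer_fmk_fusion_op_).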
node {
  name: "IteratorV2"
  op: "IteratorV2"
  attr {
    key: "op_def"
    value {
      s: "\n\007GetNext\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\026\n\014channel_name\022\006string\210\001\001"
    }
  }
  attr {
    key: "output_types"
    value {
      list {
        type: DT_INT64
      }
    }
  }
  attr {
    key: "output_tensor_desc"
    value {
      list {
        func {
          name: "0"
          attr {
            key: "serialize_datatype"
            value {
              i: 9
            }
          }
          attr {
            key: "serialize_format"
            value {
              i: 1
            }
          }
          attr {
            key: "serialize_shape"
            value {
              type: DT_INT32
            }
          }
        }
      }
    }
  }
}
node {
  name: "IteratorGetNext"
  op: "IteratorGetNext"
  input: "IteratorV2"
  attr {
    key: "output_types"
    value {
      list {
        type: DT_INT64
      }
    }
  }
  attr {
    key: "op_def"
    value {
      s: "\n\007GetNext\032\032\n\ncomponents2\014output_types\"\036\n\014output_types\022\nlist(type)(\0010\001\" \n\routput_shapes\022\013list(shape)(\0010\001\"\026\n\014channel_name\022\006string\210\001\001"
    }
  }
  attr {
    key: "input_tensor_desc"
    value {
      list {
        func {
          name: "0"
          attr {
            key: "serialize_datatype"
            value {
              i: 9
            }
          }
          attr {
            key: "serialize_format"
            value {
              i: 1
            }
          }
          attr {
            key: "serialize_shape"
            value {
              type: DT_INT32
            }
          }
        }
      }
    }
  }
  attr {
    key: "output_tensor_desc"
    value {
      list {
        func {
          name: "0"
          attr {
            key: "serialize_datatype"
            value {
              i: 9
            }
          }
          attr {
            key: "serialize_format"
            value {
              i: 1
            }
          }
          attr {
            key: "serialize_shape"
            value {
              list {
                i: -1
                i: -1
              }
            }
          }
        }
      }
    }
  }
}
node {
  name: "getnext_shape_0"
  op: "Shape"
  input: "IteratorGetNext"
  attr {
    key: "op_def"
    value {
      s: "\n\005Shape\022\n\n\005input\"\001T\032\022\n\006output\"\010out_type\"\t\n\001T\022\004type\"\034\n\010out_type\022\004type\032\0020\003:\006\n\0042\002\003\t"
    }
  }
}
node {
  name: "retval_GetNext_0_0"
  op: "_Retval"
  input: "IteratorGetNext"
  attr {
    key: "index"
    value {
      i: 0
    }
  }
  attr {
    key: "op_def"
    value {
      s: ""
    }
  }
}
node {
  name: "retval_GetNext_0_1"
  op: "_Retval"
  input: "getnext_shape_0"
  attr {
    key: "index"
    value {
      i: 1
    }
  }
  attr {
    key: "op_def"
    value {
      s: ""
    }
  }
}
library {
}
versions {
  producer: 134
}
@@ -3942,11 +3942,39 @@ TEST_F(UtestTensorflowParser, custom_parser_adapter_register)
  ASSERT_EQ(nullptr, func);
}

static Status ParseParamsStub1(const google::protobuf::Message* op_src, ge::Operator& op_dest) {
  return SUCCESS;
}
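
// Registers a custom op whose ParseParams is the stub above, then calls ParserInitialize and
// ParserFinalize twice each to verify that both entry points are idempotent.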
TEST_F(UtestTensorflowParser, tensorflow_parser_api_test)
{
  REGISTER_CUSTOM_OP("Add11")
      .FrameworkType(domi::TENSORFLOW)
      .OriginOpType("Add11")
      .ParseParamsFn(ParseParamsStub1);
  std::map<std::string, std::string> options = {{"ge.runFlag", "1"}};
  options.insert(std::pair<string, string>(string(ge::FRAMEWORK_TYPE), to_string(domi::TENSORFLOW)));
  Status ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
  ret = ParserFinalize();
  EXPECT_EQ(ret, SUCCESS);
}

TEST_F(UtestTensorflowParser, tensorflow_parser_api_test_cafee)
{
  std::map<std::string, std::string> options = {{"ge.runFlag", "1"}};
  options.insert(std::pair<string, string>(string(ge::FRAMEWORK_TYPE), to_string(domi::CAFFE)));
  Status ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
  options.insert(std::pair<string, string>(string(ge::FRAMEWORK_TYPE), to_string(domi::CAFFE)));
  ret = ParserInitialize(options);
  EXPECT_EQ(ret, SUCCESS);
@@ -4154,6 +4182,36 @@ TEST_F(UtestTensorflowParser, parser_UpdateGraph_test)
  EXPECT_EQ(ret, PARAM_INVALID);
}
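
// Parses the getnext_dynamic_fusion.pbtxt fixture added above and verifies that the
// IteratorV2/IteratorGetNext/Shape/_Retval subgraph is fused into three direct nodes.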
TEST_F(UtestTensorflowParser, tensorflow_optimizer_fmk_fusion_op_) {
  std::string caseDir = __FILE__;
  std::size_t idx = caseDir.find_last_of("/");
  caseDir = caseDir.substr(0, idx);
  const std::string root_proto = caseDir + "/origin_models/getnext_dynamic_fusion.pbtxt";
  domi::tensorflow::GraphDef graphDef;
  bool protoRet = parser::ReadProtoFromText(root_proto.c_str(), &graphDef);
  ASSERT_EQ(protoRet, true);
  TensorFlowModelParser tensorflow_parser;
  ge::ComputeGraphPtr root_graph = ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
  Status ret = tensorflow_parser.ParseProto(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
  EXPECT_EQ(ret, SUCCESS);
  EXPECT_EQ(root_graph->GetDirectNode().size(), 3);
}
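
// UpdateGraph with an empty node list must be rejected with PARAM_INVALID rather than
// silently succeeding.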
TEST_F(UtestTensorflowParser, parser_UpdateGraph_node_0)
{
  std::vector<NodePtr> nodes;
  ge::ComputeGraphPtr subGraph = std::make_shared<ge::ComputeGraph>("default");
  ParserGraphOptimizer graphOptimizer(subGraph, domi::TENSORFLOW);
  Status ret = graphOptimizer.UpdateGraph(nodes);
  EXPECT_EQ(ret, PARAM_INVALID);
}

TEST_F(UtestTensorflowParser, parser_RebuildFusionNode_test)
{
  ge::ComputeGraphPtr graph = std::make_shared<ge::ComputeGraph>(GRAPH_DEFAULT_NAME);