
!565 Add UT

Merge pull request !565 from 李正龙/ut_0613
pull/570/head
李正龙 i-robot 3 years ago
commit f753c5bc67
3 changed files with 119 additions and 6 deletions
  1. +36 -1  tests/ut/parser/testcase/caffe_parser_testcase/caffe_parser_unittest.cc
  2. +41 -0  tests/ut/parser/testcase/onnx_parser_testcase/onnx_parser_unittest.cc
  3. +42 -5  tests/ut/parser/testcase/tensorflow_parser_testcase/tensorflow_parser_unittest.cc

+36 -1  tests/ut/parser/testcase/caffe_parser_testcase/caffe_parser_unittest.cc

@@ -40,6 +40,7 @@
#include "parser/common/acl_graph_parser_util.h"
#include "parser/caffe/caffe_reshape_parser.h"
#include "common/op_map.h"
#include "parser/common/prototype_pass_manager.h"
#undef protected
#undef private

@@ -51,6 +52,7 @@

using namespace domi::caffe;
using namespace ge;
using CreateFn = std::function<ProtoTypeBasePass *(void)>;

namespace ge {
class UtestCaffeParser : public testing::Test {
@@ -66,6 +68,11 @@ class UtestCaffeParser : public testing::Test {
void RegisterCustomOp();
};

class RegisterPass : public ProtoTypeBasePass {
public:
Status Run(google::protobuf::Message *message) { return SUCCESS; }
};

static ge::NodePtr GenNodeFromOpDesc(ge::OpDescPtr opDesc){
if (!opDesc) {
return nullptr;
@@ -835,6 +842,19 @@ TEST_F(UtestCaffeParser, CaffeWeightsParser_ConvertLayerParameter_test)
{
CaffeWeightsParser weightParser;
ge::ComputeGraphPtr compute_graph = ge::parser::MakeShared<ge::ComputeGraph>("tmp_graph");
auto tensor_desc = std::make_shared<GeTensorDesc>();
tensor_desc->SetShape(GeShape({1}));
tensor_desc->SetDataType(DT_FLOAT);
tensor_desc->SetFormat(FORMAT_CHWN);

auto op_desc = std::make_shared<OpDesc>("Abs", "Abs");
op_desc->AddInputDesc(tensor_desc->Clone());
auto node = compute_graph->AddNode(op_desc);
auto op_desc1 = std::make_shared<OpDesc>("Abs", "Abs");
op_desc1->AddInputDesc(tensor_desc->Clone());
auto nodeptr = compute_graph->AddNodeFront(node);

domi::caffe::NetParameter net;
ge::OpDescPtr op_desc_src = std::make_shared<ge::OpDesc>("Abs", "AbsVal");
domi::caffe::LayerParameter *layer = net.add_layer();
@@ -1142,9 +1162,17 @@ TEST_F(UtestCaffeParser, CaffeModelParser_ParseLayerParameter_test)
auto descriptor = importer.pool()->FindMessageTypeByName("domi.caffe.LayerParameter");
google::protobuf::DynamicMessageFactory factory;
const google::protobuf::Message *proto = factory.GetPrototype(descriptor);
const google::protobuf::Message *message = proto->New();
google::protobuf::Message *message = proto->New();
Status ret = modelParser.ParseLayerParameter(descriptor, message, operators);
EXPECT_EQ(ret, SUCCESS);

const domi::FrameworkType fmk_type = domi::TENSORFLOW;
const char_t *const pass_name = "PASS_NAME";
auto func = [&](){ return new (std::nothrow) RegisterPass();};
CreateFn create_fn = func;
ProtoTypePassRegistry::GetInstance().RegisterProtoTypePass(pass_name, create_fn, fmk_type);
ret = ProtoTypePassManager::Instance().Run(message, fmk_type);
EXPECT_EQ(ret, SUCCESS);
delete message;
}
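Note on the ProtoType pass coverage added above: the registry stores a pass factory keyed by pass name and framework type, and ProtoTypePassManager::Instance().Run() instantiates and runs every pass registered for that framework against the proto message. A minimal sketch of the same pattern outside the test fixture; NoOpPass, RegisterNoOpPass and "NO_OP_PASS" are illustrative names, not part of this change:

// Sketch only: mirrors the registration done in CaffeModelParser_ParseLayerParameter_test.
class NoOpPass : public ProtoTypeBasePass {
 public:
  Status Run(google::protobuf::Message *message) override {
    // A real pass would rewrite the incoming layer proto here.
    return (message != nullptr) ? SUCCESS : FAILED;
  }
};

void RegisterNoOpPass() {
  // The registry keeps the factory; ProtoTypePassManager::Instance().Run(message, domi::CAFFE)
  // will later create and run every pass registered for the CAFFE framework type.
  CreateFn no_op_factory = []() -> ProtoTypeBasePass * { return new (std::nothrow) NoOpPass(); };
  ProtoTypePassRegistry::GetInstance().RegisterProtoTypePass("NO_OP_PASS", no_op_factory, domi::CAFFE);
}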

@@ -1192,6 +1220,13 @@ TEST_F(UtestCaffeParser, CaffeWeightsParser_ReorderInput_test)
layer2->set_name("Data");
layer2->set_type("Input");
modelParser.ReorderInput(net);

std::vector<int32_t> idx_vector = {0,1,2,4};
ge::GetParserContext().out_nodes_map.insert(pair<std::string, std::vector<int32_t>>("add", idx_vector));
const string op_name = "add";
const int32_t index = 0;
bool ret = modelParser.IsOutputTop(op_name, index);
EXPECT_EQ(ret, true);
}
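The IsOutputTop check above reads the parser context's out-nodes map, which records, for each user-specified output op, the output indices requested for it; the call returns true here presumably because index 0 is among the indices registered for "add". A minimal sketch of populating that map inside a test body (the op name and indices are illustrative):

std::vector<int32_t> out_indices = {0, 2};                                   // illustrative indices
ge::GetParserContext().out_nodes_map.insert({"my_output_op", out_indices});  // illustrative op name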

TEST_F(UtestCaffeParser, CaffeOpParser_ParseParms_test)


+41 -0  tests/ut/parser/testcase/onnx_parser_testcase/onnx_parser_unittest.cc

@@ -218,6 +218,47 @@ TEST_F(UtestOnnxParser, onnx_parser_to_json) {
const char *model_null = nullptr;
ret = onnx_parser.ToJson(model_null, json_null);
EXPECT_EQ(ret, FAILED);

char *data = nullptr;
uint32_t size = 0;
ge::ComputeGraphPtr graph;
ret = onnx_parser.ParseFromMemory(data, size, graph);
EXPECT_EQ(ret, SUCCESS);

google::protobuf::Message *proto = nullptr;
ret = onnx_parser.ParseProto(proto, graph);
EXPECT_EQ(ret, SUCCESS);

domi::GetGraphCallback callback;
ret = onnx_parser.ParseProtoWithSubgraph(proto, callback, graph);
EXPECT_EQ(ret, SUCCESS);

ret = onnx_parser.ParseAllGraph(proto, graph);
EXPECT_EQ(ret, SUCCESS);

string file = "./";
ret = onnx_parser.Save(file);
EXPECT_NE(ret, SUCCESS);

bool ret1 = onnx_parser.HasError();
EXPECT_EQ(ret1, SUCCESS);
onnx_parser.Clear();

OnnxWeightsParser onnx_weight_parser;
char *file1 = nullptr;
ge::Graph graph1;
ret = onnx_weight_parser.Parse(file1, graph1);
EXPECT_EQ(ret, SUCCESS);

ret = onnx_weight_parser.ParseFromMemory(data, size, graph);
EXPECT_EQ(ret, SUCCESS);

ret1 = onnx_weight_parser.HasError();
EXPECT_EQ(ret1, SUCCESS);

ret = onnx_weight_parser.Save(file);
EXPECT_NE(ret, SUCCESS);
onnx_weight_parser.Clear();
}
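Note on the HasError() assertions above (the TensorFlow test below uses the same pattern): HasError() returns bool and SUCCESS is 0 here, so EXPECT_EQ(ret1, SUCCESS) passes only when no parser error has been recorded. A more explicit but equivalent spelling would be:

bool has_error = onnx_parser.HasError();
EXPECT_FALSE(has_error);  // same condition as EXPECT_EQ(ret1, SUCCESS) given SUCCESS == 0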

TEST_F(UtestOnnxParser, onnx_parser_const_data_type) {


+42 -5  tests/ut/parser/testcase/tensorflow_parser_testcase/tensorflow_parser_unittest.cc

@@ -1398,6 +1398,13 @@ TEST_F(UtestTensorflowParser, tensorflow_ParserProto_failed)
ASSERT_EQ(ret, PARAM_INVALID);
}

std::unique_ptr<google::protobuf::Message> getGraphCallback(const google::protobuf::Message *root_proto, const std::string &graph)
{
(void)root_proto;
(void)graph;
return nullptr;
}

TEST_F(UtestTensorflowParser, tensorflow_parserAllGraph_failed)
{
std::string caseDir = __FILE__;
@@ -1422,6 +1429,11 @@ TEST_F(UtestTensorflowParser, tensorflow_parserAllGraph_failed)
TensorFlowModelParser tensorflow_parser;
ret = tensorflow_parser.ParseAllGraph(reinterpret_cast<google::protobuf::Message *>(&graphDef), root_graph);
ASSERT_NE(ret, SUCCESS);

domi::GetGraphCallback callback(&getGraphCallback);
const auto message_root_proto = reinterpret_cast<google::protobuf::Message *>(&graphDef);
ret = tensorflow_parser.ParseProtoWithSubgraph(message_root_proto, callback, root_graph);
ASSERT_NE(ret, SUCCESS);
}
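For reference, domi::GetGraphCallback is a std::function type whose signature matches the getGraphCallback helper above, which is why the free function can be wrapped directly. It is presumably declared along these lines (sketch only, not a definition introduced by this change):

#include <functional>
#include <memory>
#include <string>
#include "google/protobuf/message.h"

namespace domi {
using GetGraphCallback = std::function<std::unique_ptr<google::protobuf::Message>(
    const google::protobuf::Message *root_proto, const std::string &graph)>;
}  // namespace domi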

TEST_F(UtestTensorflowParser, test_parse_acl_output_nodes)
@@ -3768,6 +3780,8 @@ TEST_F(UtestTensorflowParser, tensorflow_tbe_tfplugin_loader_test)
pluginLoad.ProcessSoFullName(fileList, caffeParserPath, full_name, caffe_parser_so_suff);
ASSERT_EQ(caffeParserPath, full_name);

void *p = (void*)malloc(sizeof(int));
pluginLoad.handles_vec_.push_back(p);
pluginLoad.ClearHandles_();

std::cout << __FILE__ << std::endl;
@@ -4630,7 +4644,7 @@ TEST_F(UtestTensorflowParser, tensorflow_SoftmaxAddAttr)

TEST_F(UtestTensorflowParser, tensorflow_InferInputFormats)
{
domiTensorFormat_t ret;
domiTensorFormat_t ret2;
TensorFlowModelParser modelParser;

GetParserContext().format = DOMI_TENSOR_RESERVED;
@@ -4638,15 +4652,38 @@ TEST_F(UtestTensorflowParser, tensorflow_InferInputFormats)
NodeDef *node = MallocNodeDef("node", "DATA");
modelParser.nodedef_map_["node"] = node;
tensorflow_op_map["DATA"] = "node";
ret = modelParser.InferInputFormats();
EXPECT_EQ(ret, domi::DOMI_TENSOR_NHWC);
ret2 = modelParser.InferInputFormats();
EXPECT_EQ(ret2, domi::DOMI_TENSOR_NHWC);
delete node;
NodeDef* node1 = nullptr;
modelParser.nodedef_map_["node"] = node1;

ret = modelParser.InferInputFormats();
EXPECT_EQ(ret, domi::DOMI_TENSOR_RESERVED);
ret2 = modelParser.InferInputFormats();
EXPECT_EQ(ret2, domi::DOMI_TENSOR_RESERVED);

char *data = nullptr;
uint32_t size = 0;
ge::Graph graph;
Status ret = modelParser.ParseFromMemory(data, size, graph);
EXPECT_EQ(ret, SUCCESS);

string file = "./";
ret = modelParser.Save(file);
EXPECT_NE(ret, SUCCESS);

bool ret1 = modelParser.HasError();
EXPECT_EQ(ret1, SUCCESS);
modelParser.Clear();

TensorFlowWeightsParser tensorflow_weights_parser;
string file_path = "./";
ret = tensorflow_weights_parser.Save(file_path);
EXPECT_NE(ret, SUCCESS);

ret1 = tensorflow_weights_parser.HasError();
EXPECT_EQ(ret1, SUCCESS);
tensorflow_weights_parser.Clear();
}
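Why ret was renamed to ret2 in tensorflow_InferInputFormats: the extended test now also declares Status ret for the ParseFromMemory and Save calls later in the same function, so keeping the original domiTensorFormat_t ret would be a redefinition in the same scope. In outline:

domiTensorFormat_t ret2 = modelParser.InferInputFormats();    // format result keeps its own name
Status ret = modelParser.ParseFromMemory(data, size, graph);  // status result can now use `ret` without clashing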

TEST_F(UtestTensorflowParser, tensorflow_GetTransposeInfo)

