Browse Source

modify return type of Model::Import from std::shared_ptr&lt;Model&gt; to Model *

*
tags/v0.7.0-beta
hangq 5 years ago
parent
commit
ca6c84b806
9 changed files with 35 additions and 25 deletions
  1. +1
    -1
      build.sh
  2. +4
    -4
      mindspore/lite/include/model.h
  3. +7
    -4
      mindspore/lite/src/model.cc
  4. +2
    -2
      mindspore/lite/src/model_impl.cc
  5. +1
    -1
      mindspore/lite/src/model_impl.h
  6. +3
    -3
      mindspore/lite/test/ut/src/infer_test.cc
  7. +13
    -8
      mindspore/lite/tools/benchmark/benchmark.cc
  8. +1
    -1
      mindspore/lite/tools/converter/quantizer/post_training.cc
  9. +3
    -1
      mindspore/lite/tools/time_profile/time_profile.cc

+ 1
- 1
build.sh View File

@@ -397,7 +397,7 @@ checkndk() {
if [ "${ANDROID_NDK}" ]; then
echo -e "\e[31mANDROID_NDK_PATH=$ANDROID_NDK \e[0m"
else
echo -e "\e[31mplease set ANDROID_NDK_PATH in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
echo -e "\e[31mplease set ANDROID_NDK in environment variable for example: export ANDROID_NDK=/root/usr/android-ndk-r20b/ \e[0m"
exit 1
fi
}


+ 4
- 4
mindspore/lite/include/model.h View File

@@ -45,7 +45,7 @@ class MS_API Model {
/// \param[in] size Define bytes numbers of model buffer.
///
/// \return Pointer of MindSpore Lite Model.
static std::shared_ptr<Model> Import(const char *model_buf, size_t size);
static Model *Import(const char *model_buf, size_t size);

/// \brief Constructor of MindSpore Lite Model using default value for parameters.
///
@@ -53,7 +53,7 @@ class MS_API Model {
Model() = default;

/// \brief Destructor of MindSpore Lite Model.
virtual ~Model() = default;
virtual ~Model();

/// \brief Get MindSpore Lite Primitive by name.
///
@@ -70,13 +70,13 @@ class MS_API Model {
/// \brief Get MindSpore Lite ModelImpl.
///
/// \return A pointer of MindSpore Lite ModelImpl.
std::shared_ptr<ModelImpl> model_impl();
ModelImpl *model_impl();

/// \brief Free MetaGraph in MindSpore Lite Model.
void FreeMetaGraph();

protected:
std::shared_ptr<ModelImpl> model_impl_ = nullptr;
ModelImpl *model_impl_ = nullptr;
};

/// \brief ModelBuilder defined by MindSpore Lite.


+ 7
- 4
mindspore/lite/src/model.cc View File

@@ -24,12 +24,16 @@

namespace mindspore::lite {

std::shared_ptr<Model> Model::Import(const char *model_buf, size_t size) {
auto model = std::make_shared<Model>();
Model *Model::Import(const char *model_buf, size_t size) {
auto model = new Model();
model->model_impl_ = ModelImpl::Import(model_buf, size);
return model;
}

// Destructor: releases the ModelImpl owned via the raw model_impl_ pointer
// (allocated with `new` in Model::Import). `delete nullptr` is a no-op, so a
// default-constructed Model (model_impl_ == nullptr) is safe to destroy.
// NOTE(review): model_impl_ is a raw owning pointer; a std::unique_ptr<ModelImpl>
// member would make this destructor unnecessary (Rule of Zero) — but the commit
// deliberately moved away from shared_ptr, presumably for ABI/interface reasons.
Model::~Model() {
delete(this->model_impl_);
}

lite::Primitive *Model::GetOp(const std::string &name) const {
MS_EXCEPTION_IF_NULL(model_impl_);
return const_cast<Primitive *>(model_impl_->GetOp(name));
@@ -45,9 +49,8 @@ const schema::MetaGraph *Model::GetMetaGraph() const {
return model_impl_->GetMetaGraph();
}

std::shared_ptr<ModelImpl> Model::model_impl() {
ModelImpl *Model::model_impl() {
MS_EXCEPTION_IF_NULL(model_impl_);
return this->model_impl_;
}
} // namespace mindspore::lite


+ 2
- 2
mindspore/lite/src/model_impl.cc View File

@@ -20,7 +20,7 @@
#include "utils/log_adapter.h"

namespace mindspore::lite {
std::shared_ptr<ModelImpl> ModelImpl::Import(const char *model_buf, size_t size) {
ModelImpl *ModelImpl::Import(const char *model_buf, size_t size) {
MS_EXCEPTION_IF_NULL(model_buf);
flatbuffers::Verifier verify((const uint8_t *)model_buf, size);
if (!schema::VerifyMetaGraphBuffer(verify)) {
@@ -33,7 +33,7 @@ std::shared_ptr<ModelImpl> ModelImpl::Import(const char *model_buf, size_t size)
return nullptr;
}
memcpy(inner_model_buf, model_buf, size);
auto model = std::make_shared<ModelImpl>(inner_model_buf, size);
auto model = new (std::nothrow) ModelImpl(inner_model_buf, size);
if (model == nullptr) {
MS_LOG(ERROR) << "Create modelImpl failed";
return nullptr;


+ 1
- 1
mindspore/lite/src/model_impl.h View File

@@ -27,7 +27,7 @@ namespace mindspore {
namespace lite {
class ModelImpl {
public:
static std::shared_ptr<ModelImpl> Import(const char *model_buf, size_t size);
static ModelImpl *Import(const char *model_buf, size_t size);
ModelImpl() = default;
explicit ModelImpl(const char *model_buf, size_t size) : model_buf_(model_buf), buf_size_(size) {
meta_graph = schema::GetMetaGraph(model_buf);


+ 3
- 3
mindspore/lite/test/ut/src/infer_test.cc View File

@@ -109,7 +109,7 @@ TEST_F(InferTest, TestConvNode) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 1);
@@ -206,7 +206,7 @@ TEST_F(InferTest, TestAddNode) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 2);
@@ -257,7 +257,7 @@ TEST_F(InferTest, TestModel) {
context->thread_num_ = 4;
auto session = session::LiteSession::CreateSession(context);
ASSERT_NE(nullptr, session);
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
ASSERT_EQ(lite::RET_OK, ret);
auto inputs = session->GetInputs();
ASSERT_EQ(inputs.size(), 1);


+ 13
- 8
mindspore/lite/tools/benchmark/benchmark.cc View File

@@ -371,7 +371,7 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
return RET_ERROR;
}
delete[](graphBuf);
auto context = new(std::nothrow) lite::Context;
auto context = new (std::nothrow) lite::Context;
if (context == nullptr) {
MS_LOG(ERROR) << "New context failed while running %s", modelName.c_str();
return RET_ERROR;
@@ -393,15 +393,16 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
}
context->thread_num_ = _flags->numThreads;
session = session::LiteSession::CreateSession(context);
delete(context);
delete (context);
if (session == nullptr) {
MS_LOG(ERROR) << "CreateSession failed while running %s", modelName.c_str();
return RET_ERROR;
}
auto ret = session->CompileGraph(model.get());
auto ret = session->CompileGraph(model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "CompileGraph failed while running %s", modelName.c_str();
delete(session);
delete (session);
delete (model);
return ret;
}
msInputs = session->GetInputs();
@@ -419,21 +420,24 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
auto status = LoadInput();
if (status != 0) {
MS_LOG(ERROR) << "Generate input data error";
delete(session);
delete (session);
delete (model);
return status;
}
if (!_flags->calibDataPath.empty()) {
status = MarkAccuracy();
if (status != 0) {
MS_LOG(ERROR) << "Run MarkAccuracy error: %d" << status;
delete(session);
delete (session);
delete (model);
return status;
}
} else {
status = MarkPerformance();
if (status != 0) {
MS_LOG(ERROR) << "Run MarkPerformance error: %d" << status;
delete(session);
delete (session);
delete (model);
return status;
}
}
@@ -447,7 +451,8 @@ int Benchmark::RunBenchmark(const std::string &deviceType) {
calibData.clear();
}

delete(session);
delete (session);
delete (model);
return RET_OK;
}



+ 1
- 1
mindspore/lite/tools/converter/quantizer/post_training.cc View File

@@ -920,7 +920,7 @@ STATUS PostTrainingQuantizer::DoQuantize(FuncGraphPtr funcGraph) {
return RET_ERROR;
}

auto ret = session_->CompileGraph(model.get());
auto ret = session_->CompileGraph(model);
if (ret != lite::RET_OK) {
MS_LOG(ERROR) << "compile graph error";
return RET_ERROR;


+ 3
- 1
mindspore/lite/tools/time_profile/time_profile.cc View File

@@ -278,7 +278,7 @@ int TimeProfile::RunTimeProfile() {
}
auto model = lite::Model::Import(graphBuf, size);

auto ret = session_->CompileGraph(model.get());
auto ret = session_->CompileGraph(model);
if (ret != RET_OK) {
MS_LOG(ERROR) << "Compile graph failed.";
return RET_ERROR;
@@ -336,6 +336,8 @@ int TimeProfile::RunTimeProfile() {
}
ms_inputs_.clear();
delete graphBuf;
delete session_;
delete model;
return ret;
}



Loading…
Cancel
Save