From 5c3381da9b2af8507064d798073ce19e1029e487 Mon Sep 17 00:00:00 2001
From: yefeng
Date: Sat, 26 Dec 2020 16:11:42 +0800
Subject: [PATCH] 024-issue-onnx_cast_006.onnx-3

---
 mindspore/lite/src/inner_context.cc         |  6 ++++++
 mindspore/lite/src/sub_graph_kernel.cc      |  6 +++---
 mindspore/lite/tools/benchmark/benchmark.cc | 11 ++++++++++-
 3 files changed, 19 insertions(+), 4 deletions(-)

diff --git a/mindspore/lite/src/inner_context.cc b/mindspore/lite/src/inner_context.cc
index 184ed79628..6f84c03056 100644
--- a/mindspore/lite/src/inner_context.cc
+++ b/mindspore/lite/src/inner_context.cc
@@ -51,6 +51,12 @@ int InnerContext::Init() {
       return RET_NULL_PTR;
     }
   }
+  if (IsNpuEnabled()) {
+    MS_LOG(DEBUG) << "NPU enabled.";
+  }
+  if (IsGpuEnabled()) {
+    MS_LOG(DEBUG) << "GPU enabled.";
+  }
   return RET_OK;
 }
 
diff --git a/mindspore/lite/src/sub_graph_kernel.cc b/mindspore/lite/src/sub_graph_kernel.cc
index b4cc23e3f2..ff7369d3d0 100644
--- a/mindspore/lite/src/sub_graph_kernel.cc
+++ b/mindspore/lite/src/sub_graph_kernel.cc
@@ -288,10 +288,10 @@ int CpuFp16SubGraph::PostProcess() {
   for (size_t i = 0; i < this->in_tensors_.size(); i++) {
     auto tensor = in_tensors_.at(i);
     MS_ASSERT(tensor != nullptr);
-    if (tensor->data_type() == kNumberTypeFloat16) {
+    auto origin_tensor_data = origin_input_data_.at(i);
+    if (tensor->data_type() == kNumberTypeFloat16 && origin_tensor_data != nullptr) {
+      MS_ASSERT(tensor != nullptr);
       tensor->FreeData();
-      auto origin_tensor_data = origin_input_data_.at(i);
-      MS_ASSERT(origin_tensor_data != nullptr);
       MS_ASSERT(origin_tensor_data->data_ != nullptr);
       tensor->set_data(origin_tensor_data->data_);
       tensor->set_data_type(kNumberTypeFloat32);
diff --git a/mindspore/lite/tools/benchmark/benchmark.cc b/mindspore/lite/tools/benchmark/benchmark.cc
index 018760ea18..3a035fdd0b 100644
--- a/mindspore/lite/tools/benchmark/benchmark.cc
+++ b/mindspore/lite/tools/benchmark/benchmark.cc
@@ -686,7 +686,16 @@ int Benchmark::Init() {
   MS_LOG(INFO) << "NumThreads = " << this->flags_->num_threads_;
   MS_LOG(INFO) << "Fp16Priority = " << this->flags_->enable_fp16_;
   MS_LOG(INFO) << "calibDataPath = " << this->flags_->benchmark_data_file_;
-
+  std::cout << "ModelPath = " << this->flags_->model_file_ << std::endl;
+  std::cout << "InDataPath = " << this->flags_->in_data_file_ << std::endl;
+  std::cout << "InDataType = " << this->flags_->in_data_type_in_ << std::endl;
+  std::cout << "LoopCount = " << this->flags_->loop_count_ << std::endl;
+  std::cout << "DeviceType = " << this->flags_->device_ << std::endl;
+  std::cout << "AccuracyThreshold = " << this->flags_->accuracy_threshold_ << std::endl;
+  std::cout << "WarmUpLoopCount = " << this->flags_->warm_up_loop_count_ << std::endl;
+  std::cout << "NumThreads = " << this->flags_->num_threads_ << std::endl;
+  std::cout << "Fp16Priority = " << this->flags_->enable_fp16_ << std::endl;
+  std::cout << "calibDataPath = " << this->flags_->benchmark_data_file_ << std::endl;
   if (this->flags_->loop_count_ < 1) {
     MS_LOG(ERROR) << "LoopCount:" << this->flags_->loop_count_ << " must be greater than 0";
     std::cerr << "LoopCount:" << this->flags_->loop_count_ << " must be greater than 0" << std::endl;