From da0a18c5e3f893faa587f4a3323f5a8652f04ada Mon Sep 17 00:00:00 2001
From: zhupuxu
Date: Wed, 2 Dec 2020 14:09:07 +0800
Subject: [PATCH] fix codedex

Signed-off-by: zhupuxu
---
 include/api/model.h                                      | 1 -
 mindspore/ccsrc/cxx_api/model/acl/model_process.cc       | 2 +-
 .../cxx_api/model/model_converter_utils/multi_process.cc | 2 --
 .../cxx_api/model/model_converter_utils/multi_process.h  | 2 --
 .../cxx_api/model/model_converter_utils/shared_memory.cc | 2 --
 .../cxx_api/model/model_converter_utils/shared_memory.h  | 2 --
 mindspore/ccsrc/cxx_api/model/ms/ms_model.cc             | 4 +---
 mindspore/ccsrc/cxx_api/model/ms/ms_model.h              | 1 -
 mindspore/lite/nnacl/fp16/batchnorm_fp16.c               | 8 ++++----
 9 files changed, 6 insertions(+), 18 deletions(-)

diff --git a/include/api/model.h b/include/api/model.h
index 6378d45a9f..dffa73db89 100644
--- a/include/api/model.h
+++ b/include/api/model.h
@@ -58,7 +58,6 @@ class MS_API Model {
 
 extern MS_API const char* kDeviceTypeAscendCL;
 extern MS_API const char* kDeviceTypeAscendMS;
-
 }  // namespace api
 }  // namespace mindspore
 #endif  // MINDSPORE_INCLUDE_API_MODEL_H
diff --git a/mindspore/ccsrc/cxx_api/model/acl/model_process.cc b/mindspore/ccsrc/cxx_api/model/acl/model_process.cc
index eb2afc1095..9df51e1afd 100644
--- a/mindspore/ccsrc/cxx_api/model/acl/model_process.cc
+++ b/mindspore/ccsrc/cxx_api/model/acl/model_process.cc
@@ -307,7 +307,7 @@ Status ModelProcess::CheckAndInitInput(const std::map &inpu
     const auto &input = iter->second;
     const void *data = input.Data();
 
-    void *input_buffer;
+    void *input_buffer = nullptr;
     if (!is_run_on_device_) {
       ret = aclrtMemcpy(info.device_data, info.buffer_size, data, input.DataSize(), ACL_MEMCPY_HOST_TO_DEVICE);
       if (ret != ACL_ERROR_NONE) {
diff --git a/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.cc b/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.cc
index 9e8866d801..c56ef354d0 100644
--- a/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.cc
+++ b/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.cc
@@ -25,7 +25,6 @@
 
 namespace mindspore {
 namespace api {
-
 namespace {
 uint64_t kSharedMemorySize = 100ull << 20;  // 100 MB
 }
@@ -63,7 +62,6 @@ Status MultiProcess::MainProcess(ProcessFuncCall parent_process, ProcessFuncCall
   }
   shmat_data_addr_ = shmat_addr_ + sizeof(MessageFlag) * 2;
   shmat_data_max_size_ = memory_size_ - (shmat_data_addr_ - shmat_addr_);
-  MS_LOG_INFO << "Shm addr " << (uint64_t)shmat_addr_;
 
   if (pid == 0) {
     ChildProcess(child_process);
diff --git a/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.h b/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.h
index ec384912fe..a31d9f0a3b 100644
--- a/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.h
+++ b/mindspore/ccsrc/cxx_api/model/model_converter_utils/multi_process.h
@@ -22,7 +22,6 @@
 
 namespace mindspore {
 namespace api {
-
 struct MessageFlag {
   uint64_t heartbeat = 0;
   uint64_t stop = false;
@@ -61,7 +60,6 @@ class MultiProcess {
   Status ParentProcess(ProcessFuncCall parent_process);
   void ChildProcess(ProcessFuncCall child_process);
 };
-
 }  // namespace api
 }  // namespace mindspore
 
diff --git a/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.cc b/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.cc
index 46446f65a0..09dabe0f1d 100644
--- a/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.cc
+++ b/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.cc
@@ -21,7 +21,6 @@
 
 namespace mindspore {
 namespace api {
-
 Status SharedMemory::Create(uint64_t memory_size) {
   auto access_mode = S_IRUSR | S_IWUSR | S_IROTH | S_IWOTH | S_IRGRP | S_IWGRP;
   shm_id_ = shmget(IPC_PRIVATE, memory_size, IPC_CREAT | IPC_EXCL | access_mode);
@@ -64,6 +63,5 @@ void SharedMemory::Destroy() {
     MS_LOG_ERROR << errMsg;
   }
 }
-
 }  // namespace api
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.h b/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.h
index b79b3ff6a7..77c9423d58 100644
--- a/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.h
+++ b/mindspore/ccsrc/cxx_api/model/model_converter_utils/shared_memory.h
@@ -21,7 +21,6 @@
 
 namespace mindspore {
 namespace api {
-
 class SharedMemory {
  public:
   Status Create(uint64_t memory_size);
@@ -34,7 +33,6 @@ class SharedMemory {
   int shm_id_ = -1;
   uint8_t *shmat_addr_ = nullptr;
 };
-
 }  // namespace api
 }  // namespace mindspore
 
diff --git a/mindspore/ccsrc/cxx_api/model/ms/ms_model.cc b/mindspore/ccsrc/cxx_api/model/ms/ms_model.cc
index d136f3e962..f6dd6828c2 100644
--- a/mindspore/ccsrc/cxx_api/model/ms/ms_model.cc
+++ b/mindspore/ccsrc/cxx_api/model/ms/ms_model.cc
@@ -41,7 +41,6 @@ using std::vector;
 namespace py = pybind11;
 namespace mindspore {
 namespace api {
-
 MsModel::MsModel(uint32_t device_id) : device_id_(device_id) {}
 MsModel::~MsModel() = default;
 
@@ -320,7 +319,7 @@ void MsModel::RegAllOp() {
   }
   py::module c_expression = py::module::import("mindspore._c_expression");
   size_t ops_info_long = c_expression.attr("OpInfoLoaderPy")().attr("get_all_ops_info")().cast<size_t>();
-  auto all_ops_info = reinterpret_cast<std::vector<kernel::OpInfo *> *>(ops_info_long);
+  auto all_ops_info = reinterpret_cast<std::vector<kernel::OpInfo *> *>(static_cast(ops_info_long));
   for (auto op_info : *all_ops_info) {
     kernel::OpLib::RegOpInfo(std::shared_ptr<kernel::OpInfo>(op_info));
   }
@@ -414,6 +413,5 @@ Status MsModel::GetOutputsInfo(std::vector *tensor_list) const {
   }
   return SUCCESS;
 }
-
 }  // namespace api
 }  // namespace mindspore
diff --git a/mindspore/ccsrc/cxx_api/model/ms/ms_model.h b/mindspore/ccsrc/cxx_api/model/ms/ms_model.h
index de1dc4de85..161aae5c20 100644
--- a/mindspore/ccsrc/cxx_api/model/ms/ms_model.h
+++ b/mindspore/ccsrc/cxx_api/model/ms/ms_model.h
@@ -79,7 +79,6 @@ class MsModel : public ModelImpl {
 };
 
 API_REG_MODEL(AscendMS, MsModel);
-
 }  // namespace api
 }  // namespace mindspore
 #endif  // MINDSPORE_CCSRC_SESSION_SESSION_BASIC_H
diff --git a/mindspore/lite/nnacl/fp16/batchnorm_fp16.c b/mindspore/lite/nnacl/fp16/batchnorm_fp16.c
index facae90c3f..bae64529bc 100644
--- a/mindspore/lite/nnacl/fp16/batchnorm_fp16.c
+++ b/mindspore/lite/nnacl/fp16/batchnorm_fp16.c
@@ -17,8 +17,8 @@
 #include "nnacl/fp16/batchnorm_fp16.h"
 #include <math.h>
 
-void BatchNormFp16(const float16_t *input, const void *mean, const void *variance,
-                   BatchNormParameter *param, int task_id, float16_t *output) {
+void BatchNormFp16(const float16_t *input, const void *mean, const void *variance, BatchNormParameter *param,
+                   int task_id, float16_t *output) {
   int units_per_thread = UP_DIV(param->unit_, param->op_parameter_.thread_num_);
   int completed_units = task_id * units_per_thread;
   int cur_unit = MSMIN(units_per_thread, param->unit_ - completed_units);
@@ -47,9 +47,9 @@ void FusedBatchNormFp16(const void *input, const void *scale, const void *offset
       float16_t variance_sqrt = sqrt(((const float16_t *)variance)[c] + param->epsilon_);
       if (variance_sqrt != 0) {
         float16_t norm_val =
-          (((const float16_t *)input)[cur_offset + c] - ((const float16_t *)mean)[c]) / variance_sqrt;
+            (((const float16_t *)input)[cur_offset + c] - ((const float16_t *)mean)[c]) / variance_sqrt;
         ((float16_t *)output)[cur_offset + c] =
-          norm_val * ((const float16_t *)scale)[c] + ((const float16_t *)offset)[c];
+            norm_val * ((const float16_t *)scale)[c] + ((const float16_t *)offset)[c];
       }
     }
     cur_offset += param->channel_;