From f49ae2cffd092116a8a13a9213daffedf214c4f1 Mon Sep 17 00:00:00 2001 From: yeyunpeng Date: Thu, 10 Dec 2020 10:24:42 +0800 Subject: [PATCH] add npu ops --- .../runtime/agent/npu/npu_converter_utils.cc | 18 +++++ .../runtime/agent/npu/npu_converter_utils.h | 2 + .../runtime/agent/npu/subgraph_npu_kernel.cc | 11 +-- .../lite/src/runtime/kernel/npu/add_npu.cc | 19 +++-- .../lite/src/runtime/kernel/npu/add_npu.h | 17 ++-- .../lite/src/runtime/kernel/npu/concat_npu.cc | 16 ++-- .../lite/src/runtime/kernel/npu/concat_npu.h | 22 +++--- .../lite/src/runtime/kernel/npu/div_npu.cc | 18 +++-- .../lite/src/runtime/kernel/npu/div_npu.h | 17 ++-- .../src/runtime/kernel/npu/eltwise_npu.cc | 58 ++++++++++++++ .../lite/src/runtime/kernel/npu/eltwise_npu.h | 48 ++++++++++++ .../lite/src/runtime/kernel/npu/floor_npu.cc | 12 ++- .../lite/src/runtime/kernel/npu/floor_npu.h | 17 ++-- .../lite/src/runtime/kernel/npu/mul_npu.cc | 16 ++-- .../lite/src/runtime/kernel/npu/mul_npu.h | 17 ++-- .../lite/src/runtime/kernel/npu/npu_kernel.h | 19 ++--- .../src/runtime/kernel/npu/reshape_npu.cc | 14 +++- .../lite/src/runtime/kernel/npu/reshape_npu.h | 17 ++-- .../lite/src/runtime/kernel/npu/resize_npu.cc | 78 +++++++++++++++++++ .../lite/src/runtime/kernel/npu/resize_npu.h | 57 ++++++++++++++ .../lite/src/runtime/kernel/npu/scale_npu.cc | 13 +++- .../lite/src/runtime/kernel/npu/scale_npu.h | 17 ++-- .../src/runtime/kernel/npu/softmax_npu.cc | 14 +++- .../lite/src/runtime/kernel/npu/softmax_npu.h | 17 ++-- .../lite/src/runtime/kernel/npu/sub_npu.cc | 57 ++++++++++++++ .../lite/src/runtime/kernel/npu/sub_npu.h | 41 ++++++++++ mindspore/lite/src/scheduler.cc | 4 +- 27 files changed, 539 insertions(+), 117 deletions(-) create mode 100644 mindspore/lite/src/runtime/kernel/npu/eltwise_npu.cc create mode 100644 mindspore/lite/src/runtime/kernel/npu/eltwise_npu.h create mode 100644 mindspore/lite/src/runtime/kernel/npu/resize_npu.cc create mode 100644 mindspore/lite/src/runtime/kernel/npu/resize_npu.h 
create mode 100644 mindspore/lite/src/runtime/kernel/npu/sub_npu.cc create mode 100644 mindspore/lite/src/runtime/kernel/npu/sub_npu.h diff --git a/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.cc b/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.cc index 0e484846a0..1388eb2931 100644 --- a/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.cc +++ b/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.cc @@ -157,4 +157,22 @@ int ConverterToNPUActMode(schema::ActivationType type) { return -1; } } +// mode : Either 0 (product), 1 (sum), 2 (max), 3 (mean). Defaults to 1 (sum). +int ConverterToNPUEltwiseMode(schema::EltwiseMode mode) { + int mode_num = 1; + switch (mode) { + case schema::EltwiseMode_PROD: + mode_num = 0; + break; + case schema::EltwiseMode_SUM: + mode_num = 1; + break; + case schema::EltwiseMode_MAXIMUM: + mode_num = 2; + break; + default: + MS_LOG(ERROR) << "Unsupport Eltwise mode."; + } + return mode_num; +} } // namespace mindspore::lite diff --git a/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.h b/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.h index 9f5cc3e420..19bfcac060 100644 --- a/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.h +++ b/mindspore/lite/src/runtime/agent/npu/npu_converter_utils.h @@ -38,5 +38,7 @@ ge::Shape ConverterToNPUShape(const std::vector &src_shape); int ConverterToNPUActMode(schema::ActivationType type); +int ConverterToNPUEltwiseMode(schema::EltwiseMode mode); + } // namespace mindspore::lite #endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_NPU_CONVERTER_UITLS_H_ diff --git a/mindspore/lite/src/runtime/agent/npu/subgraph_npu_kernel.cc b/mindspore/lite/src/runtime/agent/npu/subgraph_npu_kernel.cc index e24663716f..5d699524c5 100644 --- a/mindspore/lite/src/runtime/agent/npu/subgraph_npu_kernel.cc +++ b/mindspore/lite/src/runtime/agent/npu/subgraph_npu_kernel.cc @@ -22,13 +22,9 @@ #include "src/tensor.h" #include "include/graph/model.h" #include 
"include/hiai_ir_build.h" -#include "include/HiAiModelManagerService.h" #include "include/HiAiModelManagerType.h" #include "include/context.h" #include "include/version.h" -#include "include/graph/op/array_defs.h" -#include "src/common/file_utils.h" -#include "src/common/common.h" #include "src/common/utils.h" #include "src/runtime/agent/npu/npu_converter_utils.h" #include "mindspore/lite/src/runtime/kernel/npu/npu_kernel.h" @@ -129,7 +125,11 @@ int SubGraphNpuKernel::BuildNPUInputOp() { } } // set input to NPU - reinterpret_cast(node)->SetNPUInputs(node->in_tensors(), node->out_tensors(), node_input_op); + int ret = reinterpret_cast(node)->SetNPUInputs(node->in_tensors(), node->out_tensors(), node_input_op); + if (ret != RET_OK) { + MS_LOG(ERROR) << node->name() << " set npu inputs failed."; + return RET_ERROR; + } } return RET_OK; } @@ -146,6 +146,7 @@ std::vector SubGraphNpuKernel::GetNPUNodes(const vector &inputs, const std::vector &outputs, OpParameter *opParameter) { - if (inputs[0]->shape() != inputs[1]->shape()) { - MS_LOG(INFO) << "ddk 500 does not support broadcast." - << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); + if (inputs[0]->shape().size() != inputs[1]->shape().size()) { + MS_LOG(ERROR) << "For the two inputs, the corresponding dimensions must have the same value, or one of them is 1." 
+ << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); return RET_ERROR; } return RET_OK; } -void AddNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Add(name_); + +int AddNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Add(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_input_x1(*npu_inputs[0]); op_->set_input_x2(*npu_inputs[1]); + return RET_OK; } + ge::Operator *mindspore::kernel::AddNPUKernel::GetNPUOp() { return this->op_; } + AddNPUKernel::~AddNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/add_npu.h b/mindspore/lite/src/runtime/kernel/npu/add_npu.h index bf84cd059d..7cafc77fc7 100644 --- a/mindspore/lite/src/runtime/kernel/npu/add_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/add_npu.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_ADD_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_ADD_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ADD_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ADD_NPU_H_ #include #include "src/runtime/kernel/npu/npu_kernel.h" #include "include/graph/op/math_defs.h" @@ -23,19 +23,20 @@ namespace mindspore::kernel { class AddNPUKernel : public NPUKernel { public: AddNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) {} ~AddNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Add *op_; + hiai::op::Add *op_ = nullptr; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_ADD_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ADD_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/concat_npu.cc b/mindspore/lite/src/runtime/kernel/npu/concat_npu.cc index 055d554fa8..4b776d7dc0 100644 --- a/mindspore/lite/src/runtime/kernel/npu/concat_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/concat_npu.cc @@ -26,18 +26,24 @@ int ConcatNPUKernel::IsSupport(const std::vector &inputs, const OpParameter *opParameter) { return RET_OK; } -void ConcatNPUKernel::SetNPUInputs(const std::vector &inputs, - const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::ConcatD(name_); - op_->set_attr_concat_dim(concat_parameter_->axis_); + 
+int ConcatNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::ConcatD(name_); + if (op_ == nullptr) { + return RET_ERROR; + } + op_->set_attr_concat_dim(axis_); op_->set_attr_N(npu_inputs.size()); op_->create_dynamic_input_x(npu_inputs.size()); for (int i = 0; i < npu_inputs.size(); ++i) { op_->set_dynamic_input_x(i + 1, *npu_inputs[i]); } + return RET_OK; } + ge::Operator *mindspore::kernel::ConcatNPUKernel::GetNPUOp() { return this->op_; } + ConcatNPUKernel::~ConcatNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/concat_npu.h b/mindspore/lite/src/runtime/kernel/npu/concat_npu.h index 825ccfbdd8..a61344a376 100644 --- a/mindspore/lite/src/runtime/kernel/npu/concat_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/concat_npu.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_CONCAT_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_CONCAT_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_CONCAT_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_CONCAT_NPU_H_ #include #include "nnacl/concat_parameter.h" #include "src/runtime/kernel/npu/npu_kernel.h" @@ -24,21 +24,23 @@ namespace mindspore::kernel { class ConcatNPUKernel : public NPUKernel { public: ConcatNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) { - concat_parameter_ = reinterpret_cast(parameter); + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) { + auto concat_parameter = reinterpret_cast(parameter); + axis_ = concat_parameter->axis_; } ~ConcatNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void 
SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::ConcatD *op_; - ConcatParameter *concat_parameter_; + hiai::op::ConcatD *op_ = nullptr; + int axis_; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_CONCAT_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_CONCAT_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/div_npu.cc b/mindspore/lite/src/runtime/kernel/npu/div_npu.cc index 22ff4a5548..233c2c80bc 100644 --- a/mindspore/lite/src/runtime/kernel/npu/div_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/div_npu.cc @@ -25,21 +25,27 @@ using mindspore::schema::PrimitiveType_Div; namespace mindspore::kernel { int DivNPUKernel::IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) { - op_ = new hiai::op::RealDiv(name_); - if (inputs[0]->shape() != inputs[1]->shape()) { - MS_LOG(INFO) << "ddk 500 does not support broadcast." - << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); + MS_LOG(ERROR) << "For the two inputs, the corresponding dimensions must have the same value, or one of them is 1." 
+ << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); return RET_ERROR; } return RET_OK; } -void DivNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) { + +int DivNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::RealDiv(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_input_x1(*npu_inputs[0]); op_->set_input_x2(*npu_inputs[1]); + return RET_OK; } + ge::Operator *mindspore::kernel::DivNPUKernel::GetNPUOp() { return this->op_; } + DivNPUKernel::~DivNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/div_npu.h b/mindspore/lite/src/runtime/kernel/npu/div_npu.h index a117d06833..e50d4b2f38 100644 --- a/mindspore/lite/src/runtime/kernel/npu/div_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/div_npu.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_DIV_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_DIV_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_DIV_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_DIV_NPU_H_ #include #include "src/runtime/kernel/npu/npu_kernel.h" #include "include/graph/op/math_defs.h" @@ -23,18 +23,19 @@ namespace mindspore::kernel { class DivNPUKernel : public NPUKernel { public: DivNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) {} ~DivNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::RealDiv *op_; + hiai::op::RealDiv *op_ = nullptr; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_DIV_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_DIV_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.cc b/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.cc new file mode 100644 index 0000000000..6761153a3b --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.cc @@ -0,0 +1,58 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "src/runtime/kernel/npu/eltwise_npu.h" +#include "include/graph/op/all_ops.h" +#include "src/kernel_registry.h" +#include "src/runtime/agent/npu/npu_converter_utils.h" + +using mindspore::kernel::KERNEL_ARCH::kNPU; +using mindspore::lite::KernelRegistrar; +using mindspore::schema::PrimitiveType_Eltwise; + +namespace mindspore::kernel { +int EltwiseNPUKernel::IsSupport(const std::vector &inputs, const std::vector &outputs, + OpParameter *opParameter) { + return RET_OK; +} + +int EltwiseNPUKernel::SetNPUInputs(const std::vector &inputs, + const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Eltwise(name_); + if (op_ == nullptr) { + return RET_ERROR; + } + op_->set_attr_mode(lite::ConverterToNPUEltwiseMode(mode_)); + int size = npu_inputs.size(); + op_->create_dynamic_input_x(size); + op_->set_attr_N(size); + for (int i = 0; i < size; ++i) { + op_->set_dynamic_input_x(i + 1, *npu_inputs[i]); + } + return RET_OK; +} + +ge::Operator *mindspore::kernel::EltwiseNPUKernel::GetNPUOp() { return this->op_; } + +EltwiseNPUKernel::~EltwiseNPUKernel() { + if (op_ != nullptr) { + delete op_; + op_ = nullptr; + } +} +REG_KERNEL(kNPU, kNumberTypeFloat32, PrimitiveType_Eltwise, NPUKernelCreator) +} // namespace mindspore::kernel diff --git a/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.h b/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.h new file mode 100644 index 0000000000..90df10b7bf --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/npu/eltwise_npu.h @@ -0,0 +1,48 @@ +/** + * Copyright 
2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ELTWISE_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ELTWISE_NPU_H_ +#include +#include "src/ops/eltwise.h" +#include "nnacl/arithmetic_common.h" +#include "src/runtime/kernel/npu/npu_kernel.h" +#include "include/graph/op/all_ops.h" +namespace mindspore::kernel { +class EltwiseNPUKernel : public NPUKernel { + public: + EltwiseNPUKernel(OpParameter *parameter, const std::vector &inputs, + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) { + auto eltwise = reinterpret_cast(primitive); + mode_ = static_cast(eltwise->GetMode()); + } + ~EltwiseNPUKernel() override; + + int IsSupport(const std::vector &inputs, const std::vector &outputs, + OpParameter *opParameter) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; + + ge::Operator *GetNPUOp() override; + + private: + hiai::op::Eltwise *op_ = nullptr; + schema::EltwiseMode mode_; +}; +} // namespace mindspore::kernel +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_ELTWISE_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/floor_npu.cc b/mindspore/lite/src/runtime/kernel/npu/floor_npu.cc index fecb745dee..1a664dfd99 100644 --- 
a/mindspore/lite/src/runtime/kernel/npu/floor_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/floor_npu.cc @@ -26,13 +26,19 @@ int FloorNPUKernel::IsSupport(const std::vector &inputs, const s OpParameter *opParameter) { return RET_OK; } -void FloorNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Floor(name_); +int FloorNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Floor(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_input_x(*npu_inputs[0]); + return RET_OK; } + ge::Operator *mindspore::kernel::FloorNPUKernel::GetNPUOp() { return this->op_; } + FloorNPUKernel::~FloorNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/floor_npu.h b/mindspore/lite/src/runtime/kernel/npu/floor_npu.h index 676c48d8b4..e63e5aeca9 100644 --- a/mindspore/lite/src/runtime/kernel/npu/floor_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/floor_npu.h @@ -14,8 +14,8 @@ * limitations under the License.
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_FLOOR_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_FLOOR_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_FLOOR_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_FLOOR_NPU_H_ #include #include "src/runtime/kernel/npu/npu_kernel.h" #include "include/graph/op/math_defs.h" @@ -23,18 +23,19 @@ namespace mindspore::kernel { class FloorNPUKernel : public NPUKernel { public: FloorNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) {} ~FloorNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Floor *op_; + hiai::op::Floor *op_ = nullptr; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_FLOOR_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_FLOOR_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/mul_npu.cc b/mindspore/lite/src/runtime/kernel/npu/mul_npu.cc index 73a38651c1..789d2b4ce8 100644 --- a/mindspore/lite/src/runtime/kernel/npu/mul_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/mul_npu.cc @@ -26,20 +26,26 @@ namespace mindspore::kernel { int MulNPUKernel::IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) { if (inputs[0]->shape() != inputs[1]->shape()) { - MS_LOG(INFO) << "ddk 500 does not support broadcast." 
- << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); + MS_LOG(ERROR) << "For the two inputs, the corresponding dimensions must have the same value." + << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape(); return RET_ERROR; } return RET_OK; } -void MulNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Mul(name_); +int MulNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Mul(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_input_x1(*npu_inputs[0]); op_->set_input_x2(*npu_inputs[1]); + return RET_OK; } + ge::Operator *mindspore::kernel::MulNPUKernel::GetNPUOp() { return this->op_; } + MulNPUKernel::~MulNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/mul_npu.h b/mindspore/lite/src/runtime/kernel/npu/mul_npu.h index c40c240c60..3227f540d0 100644 --- a/mindspore/lite/src/runtime/kernel/npu/mul_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/mul_npu.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_MUL_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_MUL_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_MUL_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_MUL_NPU_H_ #include #include "src/runtime/kernel/npu/npu_kernel.h" #include "include/graph/op/math_defs.h" @@ -23,18 +23,19 @@ namespace mindspore::kernel { class MulNPUKernel : public NPUKernel { public: MulNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) {} ~MulNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Mul *op_; + hiai::op::Mul *op_ = nullptr; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_Mul_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_Mul_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h b/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h index cb9f26db2d..c1b85f4dd5 100644 --- a/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h +++ b/mindspore/lite/src/runtime/kernel/npu/npu_kernel.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_KERNEL_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_KERNEL_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_KERNEL_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_KERNEL_NPU_H_ #include #include "src/lite_kernel.h" @@ -30,8 +30,9 @@ namespace mindspore::kernel { class NPUKernel : public LiteKernel { public: NPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : LiteKernel(parameter, inputs, outputs, nullptr, nullptr) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : LiteKernel(parameter, inputs, outputs, ctx, primitive) {} ~NPUKernel() override = default; int Run() override { return RET_ERROR; } @@ -43,16 +44,16 @@ class NPUKernel : public LiteKernel { virtual ge::Operator *GetNPUOp() = 0; - virtual void SetNPUInputs(const std::vector &inputs, - const std::vector &outputs, - const std::vector &npu_inputs) = 0; + virtual int SetNPUInputs(const std::vector &inputs, + const std::vector &outputs, + const std::vector &npu_inputs) = 0; }; template kernel::LiteKernel *NPUKernelCreator(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter, const lite::InnerContext *ctx, const kernel::KernelKey &desc, const mindspore::lite::PrimitiveC *primitive) { - auto *kernel = new (std::nothrow) T(opParameter, inputs, outputs); + auto *kernel = new (std::nothrow) T(opParameter, inputs, outputs, ctx, primitive); if (kernel == nullptr) { MS_LOG(ERROR) << "kernel " << opParameter->name_ << "is nullptr."; free(opParameter); @@ -66,4 +67,4 @@ kernel::LiteKernel *NPUKernelCreator(const std::vector &inputs, return kernel; } } // namespace mindspore::kernel -#endif // LITE_MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPUKERNEL_H_ +#endif // LITE_MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPUKERNEL_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/reshape_npu.cc 
b/mindspore/lite/src/runtime/kernel/npu/reshape_npu.cc index baeb248c53..388c0211d7 100644 --- a/mindspore/lite/src/runtime/kernel/npu/reshape_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/reshape_npu.cc @@ -27,15 +27,21 @@ int ReshapeNPUKernel::IsSupport(const std::vector &inputs, const OpParameter *opParameter) { return RET_OK; } -void ReshapeNPUKernel::SetNPUInputs(const std::vector &inputs, - const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Reshape(name_); +int ReshapeNPUKernel::SetNPUInputs(const std::vector &inputs, + const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Reshape(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_input_x(*npu_inputs[0]); op_->set_input_shape(*npu_inputs[1]); + return RET_OK; } + ge::Operator *mindspore::kernel::ReshapeNPUKernel::GetNPUOp() { return this->op_; } + ReshapeNPUKernel::~ReshapeNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/reshape_npu.h b/mindspore/lite/src/runtime/kernel/npu/reshape_npu.h index eca1cd490d..f6a199c88d 100644 --- a/mindspore/lite/src/runtime/kernel/npu/reshape_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/reshape_npu.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_RESHAPE_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_RESHAPE_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESHAPE_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESHAPE_NPU_H_ #include #include "nnacl/conv_parameter.h" #include "src/runtime/kernel/npu/npu_kernel.h" @@ -24,18 +24,19 @@ namespace mindspore::kernel { class ReshapeNPUKernel : public NPUKernel { public: ReshapeNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) {} + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) {} ~ReshapeNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Reshape *op_; + hiai::op::Reshape *op_ = nullptr; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_RESHAPE_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESHAPE_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/resize_npu.cc b/mindspore/lite/src/runtime/kernel/npu/resize_npu.cc new file mode 100644 index 0000000000..616b269eca --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/npu/resize_npu.cc @@ -0,0 +1,78 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "src/runtime/kernel/npu/resize_npu.h" +#include +#include "include/graph/op/all_ops.h" +#include "src/kernel_registry.h" +#include "src/runtime/agent/npu/npu_converter_utils.h" + +using mindspore::kernel::KERNEL_ARCH::kNPU; +using mindspore::lite::KernelRegistrar; +using mindspore::schema::PrimitiveType_Resize; + +namespace mindspore::kernel { +int ResizeNPUKernel::IsSupport(const std::vector &inputs, const std::vector &outputs, + OpParameter *opParameter) { + if (method_ != schema::ResizeMethod_LINEAR && method_ != schema::ResizeMethod_NEAREST) { + MS_LOG(ERROR) << "Unsupported resize method type:" << method_; + return RET_ERROR; + } + return RET_OK; +} + +int ResizeNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + ge::TensorDesc sizeTensorDesc(ge::Shape({2}), ge::FORMAT_NCHW, ge::DT_INT32); + ge::TensorPtr sizeTensor = std::make_shared(sizeTensorDesc); + vector dataValue = {static_cast(new_height_), static_cast(new_width_)}; + sizeTensor->SetData(reinterpret_cast(dataValue.data()), 2 * sizeof(int32_t)); + auto out_size = new (std::nothrow) hiai::op::Const(name_ + "size"); + out_size->set_attr_value(sizeTensor); + + if (method_ == schema::ResizeMethod_LINEAR) { + auto op = new (std::nothrow) hiai::op::ResizeBilinearV2(name_); + if (op == nullptr) { + return RET_ERROR; + } + op->set_attr_align_corners(false); + op->set_input_x(*npu_inputs[0]); + op->set_input_size(*out_size); + op->set_attr_half_pixel_centers(true); + op_ = op; + } else { + auto op = new
(std::nothrow) hiai::op::ResizeNearestNeighborV2(name_); + if (op == nullptr) { + return RET_ERROR; + } + op->set_attr_align_corners(align_corners_); + op->set_input_x(*npu_inputs[0]); + op->set_input_size(*out_size); + op_ = op; + } + return RET_OK; +} + +ge::Operator *mindspore::kernel::ResizeNPUKernel::GetNPUOp() { return this->op_; } + +ResizeNPUKernel::~ResizeNPUKernel() { + if (op_ != nullptr) { + delete op_; + op_ = nullptr; + } +} +REG_KERNEL(kNPU, kNumberTypeFloat32, PrimitiveType_Resize, NPUKernelCreator) +} // namespace mindspore::kernel diff --git a/mindspore/lite/src/runtime/kernel/npu/resize_npu.h b/mindspore/lite/src/runtime/kernel/npu/resize_npu.h new file mode 100644 index 0000000000..61188dff3d --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/npu/resize_npu.h @@ -0,0 +1,57 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */
+
+#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESIZE_NPU_H_
+#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESIZE_NPU_H_
+#include <vector>
+#include "nnacl/resize_parameter.h"
+#include "src/ops/resize.h"
+#include "nnacl/arithmetic_common.h"
+#include "src/runtime/kernel/npu/npu_kernel.h"
+#include "include/graph/op/all_ops.h"
+namespace mindspore::kernel {
+// NPU kernel wrapping hiai::op::ResizeBilinearV2 / ResizeNearestNeighborV2.
+class ResizeNPUKernel : public NPUKernel {
+ public:
+  ResizeNPUKernel(OpParameter *parameter, const std::vector<lite::Tensor *> &inputs,
+                  const std::vector<lite::Tensor *> &outputs, const lite::InnerContext *ctx,
+                  const mindspore::lite::PrimitiveC *primitive)
+      : NPUKernel(parameter, inputs, outputs, ctx, primitive) {
+    auto resize_parameter = reinterpret_cast<ResizeParameter *>(parameter);
+    method_ = resize_parameter->method_;
+    new_height_ = resize_parameter->new_height_;
+    new_width_ = resize_parameter->new_width_;
+    align_corners_ = resize_parameter->align_corners_;
+    // NOTE(review): preserve_aspect_ratio_ is stored but never consulted when
+    // building the NPU op -- confirm the NPU path may ignore it or reject it
+    // in IsSupport.
+    preserve_aspect_ratio_ = resize_parameter->preserve_aspect_ratio_;
+  }
+  ~ResizeNPUKernel() override;
+
+  int IsSupport(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                OpParameter *opParameter) override;
+  int SetNPUInputs(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                   const std::vector<ge::Operator *> &npu_inputs) override;
+
+  ge::Operator *GetNPUOp() override;
+
+ private:
+  ge::Operator *op_ = nullptr;  // owned; released in the destructor
+  int method_ = 0;
+  int64_t new_height_ = 0;
+  int64_t new_width_ = 0;
+  bool align_corners_ = false;
+  bool preserve_aspect_ratio_ = false;
+};
+}  // namespace mindspore::kernel
+#endif  // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_RESIZE_NPU_H_
diff --git a/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc b/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
index e5a55a5268..97a00c7a11 100644
--- a/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
+++ b/mindspore/lite/src/runtime/kernel/npu/scale_npu.cc
@@ -26,15 +26,22 @@ int ScaleNPUKernel::IsSupport(const std::vector<lite::Tensor *> &inputs, const s
 OpParameter *opParameter) {
   return RET_OK;
 }
-void ScaleNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *>
&outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Scale(name_); + +int ScaleNPUKernel::SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Scale(name_); + if (op_ == nullptr) { + return RET_ERROR; + } op_->set_attr_axis(this->axis_); op_->set_input_x(*npu_inputs[0]); op_->set_input_scale(*npu_inputs[1]); op_->set_input_bias(*npu_inputs[2]); + return RET_OK; } + ge::Operator *mindspore::kernel::ScaleNPUKernel::GetNPUOp() { return this->op_; } + ScaleNPUKernel::~ScaleNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/scale_npu.h b/mindspore/lite/src/runtime/kernel/npu/scale_npu.h index 439935eaeb..156fd0aa74 100644 --- a/mindspore/lite/src/runtime/kernel/npu/scale_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/scale_npu.h @@ -14,8 +14,8 @@ * limitations under the License. */ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_SCALE_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_SCALE_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SCALE_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SCALE_NPU_H_ #include #include "nnacl/scale.h" #include "src/runtime/kernel/npu/npu_kernel.h" @@ -24,21 +24,22 @@ namespace mindspore::kernel { class ScaleNPUKernel : public NPUKernel { public: ScaleNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) { + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) { axis_ = reinterpret_cast(parameter)->axis_; } ~ScaleNPUKernel() override; int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int 
SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Scale *op_; + hiai::op::Scale *op_ = nullptr; int axis_; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_Scale_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_Scale_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/softmax_npu.cc b/mindspore/lite/src/runtime/kernel/npu/softmax_npu.cc index ab0e06742b..d6a6750173 100644 --- a/mindspore/lite/src/runtime/kernel/npu/softmax_npu.cc +++ b/mindspore/lite/src/runtime/kernel/npu/softmax_npu.cc @@ -26,19 +26,25 @@ int SoftmaxNPUKernel::IsSupport(const std::vector &inputs, const OpParameter *opParameter) { return RET_OK; } -void SoftmaxNPUKernel::SetNPUInputs(const std::vector &inputs, - const std::vector &outputs, - const std::vector &npu_inputs) { - op_ = new hiai::op::Softmax(name_); +int SoftmaxNPUKernel::SetNPUInputs(const std::vector &inputs, + const std::vector &outputs, + const std::vector &npu_inputs) { + op_ = new (std::nothrow) hiai::op::Softmax(name_); + if (op_ == nullptr) { + return RET_ERROR; + } if (axis_ == -1) { op_->set_attr_axis(inputs[0]->shape().size() - 1); } else { op_->set_attr_axis(axis_); } op_->set_input_x(*npu_inputs[0]); + return RET_OK; } + ge::Operator *mindspore::kernel::SoftmaxNPUKernel::GetNPUOp() { return this->op_; } + SoftmaxNPUKernel::~SoftmaxNPUKernel() { if (op_ != nullptr) { delete op_; diff --git a/mindspore/lite/src/runtime/kernel/npu/softmax_npu.h b/mindspore/lite/src/runtime/kernel/npu/softmax_npu.h index 641d5f0513..1a4718878e 100644 --- a/mindspore/lite/src/runtime/kernel/npu/softmax_npu.h +++ b/mindspore/lite/src/runtime/kernel/npu/softmax_npu.h @@ -14,8 +14,8 @@ * limitations under the License. 
*/ -#ifndef MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_SOFTMAX_H_ -#define MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_SOFTMAX_H_ +#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SOFTMAX_NPU_H_ +#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SOFTMAX_NPU_H_ #include #include "src/runtime/kernel/npu/npu_kernel.h" #include "nnacl/softmax_parameter.h" @@ -24,8 +24,9 @@ namespace mindspore::kernel { class SoftmaxNPUKernel : public NPUKernel { public: SoftmaxNPUKernel(OpParameter *parameter, const std::vector &inputs, - const std::vector &outputs) - : NPUKernel(parameter, inputs, outputs) { + const std::vector &outputs, const lite::InnerContext *ctx, + const mindspore::lite::PrimitiveC *primitive) + : NPUKernel(parameter, inputs, outputs, ctx, primitive) { auto softmax_parameter = reinterpret_cast(parameter); axis_ = softmax_parameter->axis_; } @@ -33,13 +34,13 @@ class SoftmaxNPUKernel : public NPUKernel { int IsSupport(const std::vector &inputs, const std::vector &outputs, OpParameter *opParameter) override; - void SetNPUInputs(const std::vector &inputs, const std::vector &outputs, - const std::vector &npu_inputs) override; + int SetNPUInputs(const std::vector &inputs, const std::vector &outputs, + const std::vector &npu_inputs) override; ge::Operator *GetNPUOp() override; private: - hiai::op::Softmax *op_; + hiai::op::Softmax *op_ = nullptr; int axis_; }; } // namespace mindspore::kernel -#endif // MINDSPORE_LITE_SRC_RUNTIME_AGENT_NPU_KERNEL_NPU_SOFTMAX_H_ +#endif // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SOFTMAX_NPU_H_ diff --git a/mindspore/lite/src/runtime/kernel/npu/sub_npu.cc b/mindspore/lite/src/runtime/kernel/npu/sub_npu.cc new file mode 100644 index 0000000000..869ab4348d --- /dev/null +++ b/mindspore/lite/src/runtime/kernel/npu/sub_npu.cc @@ -0,0 +1,57 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "src/runtime/kernel/npu/sub_npu.h"
+#include "include/graph/op/all_ops.h"
+#include "src/kernel_registry.h"
+
+using mindspore::kernel::KERNEL_ARCH::kNPU;
+using mindspore::lite::KernelRegistrar;
+using mindspore::schema::PrimitiveType_Sub;
+
+namespace mindspore::kernel {
+// NOTE(review): only the ranks of the two inputs are compared here, although
+// the error message speaks of matching dimension *values* -- confirm that
+// hiai::op::Sub broadcasts between equal-rank shapes, or tighten the check.
+int SubNPUKernel::IsSupport(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                            OpParameter *opParameter) {
+  if (inputs[0]->shape().size() != inputs[1]->shape().size()) {
+    MS_LOG(ERROR) << "For the two inputs, the corresponding dimensions must have the same value."
+                  << " shape 1 is:" << inputs[0]->shape() << " shape 2 is:" << inputs[1]->shape();
+    return RET_ERROR;
+  }
+  return RET_OK;
+}
+
+// Builds hiai::op::Sub(x1, x2); op_ ownership stays with this kernel.
+int SubNPUKernel::SetNPUInputs(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                               const std::vector<ge::Operator *> &npu_inputs) {
+  op_ = new (std::nothrow) hiai::op::Sub(name_);
+  if (op_ == nullptr) {
+    return RET_ERROR;
+  }
+  op_->set_input_x1(*npu_inputs[0]);
+  op_->set_input_x2(*npu_inputs[1]);
+  return RET_OK;
+}
+
+ge::Operator *mindspore::kernel::SubNPUKernel::GetNPUOp() { return this->op_; }
+
+SubNPUKernel::~SubNPUKernel() {
+  if (op_ != nullptr) {
+    delete op_;
+    op_ = nullptr;
+  }
+}
+
+REG_KERNEL(kNPU, kNumberTypeFloat32, PrimitiveType_Sub, NPUKernelCreator<SubNPUKernel>)
+}  // namespace mindspore::kernel
diff --git a/mindspore/lite/src/runtime/kernel/npu/sub_npu.h b/mindspore/lite/src/runtime/kernel/npu/sub_npu.h
new file mode 100644
index 0000000000..fd33afea7e
--- /dev/null
+++ b/mindspore/lite/src/runtime/kernel/npu/sub_npu.h
@@ -0,0 +1,41 @@
+/**
+ * Copyright 2020 Huawei
Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SUB_NPU_H_
+#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SUB_NPU_H_
+#include <vector>
+#include "src/runtime/kernel/npu/npu_kernel.h"
+#include "include/graph/op/math_defs.h"
+namespace mindspore::kernel {
+// NPU kernel wrapping hiai::op::Sub (element-wise x1 - x2).
+class SubNPUKernel : public NPUKernel {
+ public:
+  SubNPUKernel(OpParameter *parameter, const std::vector<lite::Tensor *> &inputs,
+               const std::vector<lite::Tensor *> &outputs, const lite::InnerContext *ctx,
+               const mindspore::lite::PrimitiveC *primitive)
+      : NPUKernel(parameter, inputs, outputs, ctx, primitive) {}
+  ~SubNPUKernel() override;
+
+  int IsSupport(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                OpParameter *opParameter) override;
+  int SetNPUInputs(const std::vector<lite::Tensor *> &inputs, const std::vector<lite::Tensor *> &outputs,
+                   const std::vector<ge::Operator *> &npu_inputs) override;
+  ge::Operator *GetNPUOp() override;
+
+ private:
+  hiai::op::Sub *op_ = nullptr;  // owned; deleted in the destructor
+};
+}  // namespace mindspore::kernel
+#endif  // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_NPU_SUB_NPU_H_
diff --git a/mindspore/lite/src/scheduler.cc b/mindspore/lite/src/scheduler.cc
index 15e44dcbc2..ec76e4d152 100644
--- a/mindspore/lite/src/scheduler.cc
+++ b/mindspore/lite/src/scheduler.cc
@@ -270,7 +270,10 @@ kernel::SubGraphKernel *Scheduler::CreateSubGraphKernel(const std::vector<kernel
   sub_kernel->SetIndex(index);
-  sub_kernel->Init();
+  if (sub_kernel->Init() != RET_OK) {
+    delete sub_kernel;  // BUGFIX: the just-created kernel leaked when Init failed
+    return nullptr;
+  }
   return sub_kernel;
 #else
   return nullptr;