@@ -34,14 +34,6 @@ int ElementCos(float *input, float *output, int element_size) {
  return NNACL_OK;
}

// exp:
int ElementExp(float *input, float *output, int element_size) {
  for (int i = 0; i < element_size; i++) {
    output[i] = expf(input[i]);
  }
  return NNACL_OK;
}

// log:
int ElementLog(float *input, float *output, int element_size) {
  for (int i = 0; i < element_size; i++) {
@@ -30,8 +30,6 @@ int ElementAbs(float *input, float *output, int element_size);
int ElementCos(float *input, float *output, int element_size);
int ElementExp(float *input, float *output, int element_size);
int ElementLog(float *input, float *output, int element_size);
int ElementSquare(float *input, float *output, int element_size);
@@ -0,0 +1,37 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "nnacl/fp32/exp.h"
#include <math.h>
#include "nnacl/errorcode.h"
int Exp(float *input_data, float *output_data, ExpParameter *parameter, int task_id) {
  if (parameter->in_scale_ == 1) {
    for (int i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
      output_data[i] = expf(input_data[i]);
    }
  } else {
    for (int i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
      output_data[i] = expf(input_data[i] * parameter->in_scale_);
    }
  }
  if (parameter->out_scale_ != 1) {
    for (int i = task_id; i < parameter->element_num_; i += parameter->thread_num_) {
      output_data[i] = output_data[i] * parameter->out_scale_;
    }
  }
  return NNACL_OK;
}
@@ -0,0 +1,41 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_LITE_NNACL_FP32_EXP_H_
#define MINDSPORE_LITE_NNACL_FP32_EXP_H_

#include "nnacl/op_base.h"
typedef struct ExpParameter {
  OpParameter op_parameter_;
  int thread_num_;
  float base_;
  float scale_;
  float shift_;
  float in_scale_;
  float out_scale_;
  int element_num_;
} ExpParameter;

#ifdef __cplusplus
extern "C" {
#endif
int Exp(float *input_data, float *output_data, ExpParameter *parameter, int task_id);
#ifdef __cplusplus
}
#endif

#endif  // MINDSPORE_LITE_NNACL_FP32_EXP_H_
@@ -478,9 +478,9 @@ table Neg {
}

table Exp {
    base : float;
    scale : float;
    shift : float;
    base : float = -1.0;
    scale : float = 1.0;
    shift : float = 0.0;
}

table Cos {
@@ -18,16 +18,35 @@
namespace mindspore {
namespace lite {
#ifndef PRIMITIVE_WRITEABLE
#ifdef PRIMITIVE_WRITEABLE
void Exp::SetBase(float base) { this->primitive_->value.AsExp()->base = base; }
void Exp::SetScale(float scale) { this->primitive_->value.AsExp()->scale = scale; }
void Exp::SetShift(float shift) { this->primitive_->value.AsExp()->shift = shift; }

float Exp::GetBase() const { return this->primitive_->value.AsExp()->base; }
float Exp::GetScale() const { return this->primitive_->value.AsExp()->scale; }
float Exp::GetShift() const { return this->primitive_->value.AsExp()->shift; }
#else
int Exp::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateExp(*fbb);
  auto attr = primitive->value_as_Exp();
  if (attr == nullptr) {
    MS_LOG(ERROR) << "value_as_Exp return nullptr";
    return RET_ERROR;
  }
  auto val_offset = schema::CreateExp(*fbb, attr->base(), attr->scale(), attr->shift());
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_Exp, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
}

float Exp::GetBase() const { return this->primitive_->value_as_Exp()->base(); }
float Exp::GetScale() const { return this->primitive_->value_as_Exp()->scale(); }
float Exp::GetShift() const { return this->primitive_->value_as_Exp()->shift(); }
#endif
}  // namespace lite
}  // namespace mindspore
@@ -21,21 +21,27 @@
#include <set>
#include <cmath>
#include "ir/dtype/type_id.h"
#include "src/ops/arithmetic_self.h"
#include "src/ops/primitive_c.h"

namespace mindspore {
namespace lite {
class Exp : public ArithmeticSelf {
class Exp : public PrimitiveC {
 public:
#ifdef PRIMITIVE_WRITEABLE
  MS_DECLARE_PARENT(Exp, ArithmeticSelf);
  MS_DECLARE_PARENT(Exp, PrimitiveC);
  Exp() = default;
  explicit Exp(schema::PrimitiveT *primitive) : ArithmeticSelf(primitive) {}
  explicit Exp(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
  void SetBase(float base);
  void SetShift(float shift);
  void SetScale(float scale);
#else
  Exp() = default;

  int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
#endif
  float GetBase() const;
  float GetShift() const;
  float GetScale() const;
};
}  // namespace lite
}  // namespace mindspore
@@ -23,7 +23,7 @@ namespace lite {
int FloorDiv::UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) {
  MS_ASSERT(nullptr != primitive);
  MS_ASSERT(nullptr != fbb);
  auto val_offset = schema::CreateFloor(*fbb);
  auto val_offset = schema::CreateFloorDiv(*fbb);
  auto prim_offset = schema::CreatePrimitive(*fbb, schema::PrimitiveType_FloorDiv, val_offset.o);
  fbb->Finish(prim_offset);
  return RET_OK;
@@ -173,6 +173,7 @@
#include "nnacl/sparse_to_dense.h"
#include "nnacl/l2_norm_parameter.h"
#include "nnacl/detection_post_process_parameter.h"
#include "nnacl/fp32/exp.h"

namespace mindspore::kernel {

@@ -1519,8 +1520,7 @@ OpParameter *PopulateEluParameter(const mindspore::lite::PrimitiveC *primitive)
  return reinterpret_cast<OpParameter *>(elu_parameter);
}

OpParameter *PopulateL2NormParameter(
    const mindspore::lite::PrimitiveC *primitive) {
OpParameter *PopulateL2NormParameter(const mindspore::lite::PrimitiveC *primitive) {
  L2NormParameter *l2_norm_parameter = reinterpret_cast<L2NormParameter *>(malloc(sizeof(L2NormParameter)));
  if (l2_norm_parameter == nullptr) {
    MS_LOG(ERROR) << "malloc L2NormParameter failed.";
@@ -1568,6 +1568,26 @@ OpParameter *PopulateDetectionPostProcessParameter(const mindspore::lite::Primit
  return reinterpret_cast<OpParameter *>(detection_post_process_parameter);
}
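
// Converts the Exp primitive attributes into an nnacl ExpParameter; a base other than the
// -1 sentinel (natural e) must be strictly positive, since ReSize later calls logf/powf on it.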
OpParameter *PopulateExpParameter(const mindspore::lite::PrimitiveC *primitive) {
  ExpParameter *exp_parameter = reinterpret_cast<ExpParameter *>(malloc(sizeof(ExpParameter)));
  if (exp_parameter == nullptr) {
    MS_LOG(ERROR) << "malloc ExpParameter failed.";
    return nullptr;
  }
  memset(exp_parameter, 0, sizeof(ExpParameter));
  exp_parameter->op_parameter_.type_ = primitive->Type();
  auto param = reinterpret_cast<mindspore::lite::Exp *>(const_cast<mindspore::lite::PrimitiveC *>(primitive));
  exp_parameter->base_ = param->GetBase();
  exp_parameter->scale_ = param->GetScale();
  exp_parameter->shift_ = param->GetShift();
  if (exp_parameter->base_ != -1 && exp_parameter->base_ <= 0) {
    MS_LOG(ERROR) << "Exp base must be strictly positive, got " << exp_parameter->base_;
    free(exp_parameter);
    return nullptr;
  }
  return reinterpret_cast<OpParameter *>(exp_parameter);
}

PopulateParameterRegistry::PopulateParameterRegistry() {
  populate_parameter_funcs_[schema::PrimitiveType_SparseToDense] = PopulateSparseToDenseParameter;
  populate_parameter_funcs_[schema::PrimitiveType_SoftMax] = PopulateSoftmaxParameter;
@@ -1610,7 +1630,7 @@ PopulateParameterRegistry::PopulateParameterRegistry() {
  populate_parameter_funcs_[schema::PrimitiveType_Abs] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Cos] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Sin] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Exp] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Exp] = PopulateExpParameter;
  populate_parameter_funcs_[schema::PrimitiveType_Log] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Square] = PopulateArithmeticSelf;
  populate_parameter_funcs_[schema::PrimitiveType_Sqrt] = PopulateArithmeticSelf;
@@ -169,7 +169,6 @@ kernel::LiteKernel *CpuArithmeticSelfFp32KernelCreator(const std::vector<lite::t
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Abs, CpuArithmeticSelfFp32KernelCreator)
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Cos, CpuArithmeticSelfFp32KernelCreator)
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Exp, CpuArithmeticSelfFp32KernelCreator)
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Log, CpuArithmeticSelfFp32KernelCreator)
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Square, CpuArithmeticSelfFp32KernelCreator)
REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Sqrt, CpuArithmeticSelfFp32KernelCreator)
@@ -28,7 +28,6 @@ using mindspore::lite::Context;
using mindspore::schema::PrimitiveType_Abs;
using mindspore::schema::PrimitiveType_Ceil;
using mindspore::schema::PrimitiveType_Cos;
using mindspore::schema::PrimitiveType_Exp;
using mindspore::schema::PrimitiveType_Floor;
using mindspore::schema::PrimitiveType_Log;
using mindspore::schema::PrimitiveType_LogicalNot;
@@ -55,9 +54,6 @@ class ArithmeticSelfCPUKernel : public LiteKernel {
      case PrimitiveType_Cos:
        arithmeticSelf_run_ = ElementCos;
        break;
      case PrimitiveType_Exp:
        arithmeticSelf_run_ = ElementExp;
        break;
      case PrimitiveType_Log:
        arithmeticSelf_run_ = ElementLog;
        break;
@@ -0,0 +1,115 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include "src/runtime/kernel/arm/fp32/exp.h"
#include <math.h>
#include "include/errorcode.h"
#include "src/kernel_registry.h"
#include "src/runtime/runtime_api.h"

using mindspore::lite::KernelRegistrar;
using mindspore::lite::RET_ERROR;
using mindspore::lite::RET_OK;
using mindspore::schema::PrimitiveType_Exp;

namespace mindspore::kernel {
int ExpCPUKernel::Init() {
  exp_parameter_ = reinterpret_cast<ExpParameter *>(op_parameter_);
  exp_parameter_->thread_num_ = thread_count_;
  if (!InferShapeDone()) {
    return RET_OK;
  }
  return ReSize();
}
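
// Folds base_, scale_ and shift_ into the two factors consumed by the nnacl Exp loop:
// in_scale_ = scale_ * ln(base_) and out_scale_ = base_^shift_, with base_ == -1 standing for the natural e.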
int ExpCPUKernel::ReSize() {
  exp_parameter_->thread_num_ = thread_count_;
  float log_ = (exp_parameter_->base_ == -1) ? 1 : logf(exp_parameter_->base_);
  exp_parameter_->in_scale_ = exp_parameter_->scale_ * log_;
  if (exp_parameter_->shift_ == 0) {
    exp_parameter_->out_scale_ = 1;
  } else {
    if (log_ == 1) {
      exp_parameter_->out_scale_ = expf(exp_parameter_->shift_);
    } else {
      exp_parameter_->out_scale_ = powf(exp_parameter_->base_, exp_parameter_->shift_);
    }
  }
  return RET_OK;
}

int ExpCPUKernel::DoExcute(int task_id) {
  Exp(input_addr_, output_addr_, exp_parameter_, task_id);
  return RET_OK;
}

int ExpRun(void *cdata, int task_id) {
  auto ExpData = reinterpret_cast<ExpCPUKernel *>(cdata);
  auto ret = ExpData->DoExcute(task_id);
  if (ret != RET_OK) {
    MS_LOG(ERROR) << "ExpRun error task_id[" << task_id << "] error_code[" << ret << "]";
    return RET_ERROR;
  }
  return RET_OK;
}

int ExpCPUKernel::Run() {
  auto prepare_ret = Prepare();
  if (prepare_ret != RET_OK) {
    MS_LOG(ERROR) << "Prepare failed! ret: " << prepare_ret;
    return prepare_ret;
  }
  input_addr_ = reinterpret_cast<float *>(in_tensors_.front()->Data());
  output_addr_ = reinterpret_cast<float *>(out_tensors_.front()->Data());
  exp_parameter_->element_num_ = in_tensors_.front()->ElementsNum();

  auto ret = ParallelLaunch(THREAD_POOL_DEFAULT, ExpRun, this, exp_parameter_->thread_num_);
  if (ret != RET_OK) {
    MS_LOG(ERROR) << "Exp error: error_code[" << ret << "]";
    return RET_ERROR;
  }
  return RET_OK;
}

kernel::LiteKernel *CpuExpFp32KernelCreator(const std::vector<lite::tensor::Tensor *> &inputs,
                                            const std::vector<lite::tensor::Tensor *> &outputs, OpParameter *parameter,
                                            const lite::Context *ctx, const KernelKey &desc,
                                            const mindspore::lite::PrimitiveC *primitive) {
  if (parameter == nullptr || ctx == nullptr) {
    MS_LOG(ERROR) << "parameter or ctx is nullptr";
    return nullptr;
  }
  MS_ASSERT(desc.type == PrimitiveType_Exp);
  auto *kernel = new (std::nothrow) ExpCPUKernel(parameter, inputs, outputs, ctx, primitive);
  if (kernel == nullptr) {
    MS_LOG(ERROR) << "Create Kernel failed, name: " << parameter->name_;
    return nullptr;
  }
  auto ret = kernel->Init();
  if (ret != RET_OK) {
    MS_LOG(ERROR) << "Init Kernel failed, name: " << parameter->name_
                  << ", type: " << schema::EnumNamePrimitiveType(static_cast<schema::PrimitiveType>(parameter->type_));
    delete kernel;
    return nullptr;
  }
  return kernel;
}

REG_KERNEL(kCPU, kNumberTypeFloat32, PrimitiveType_Exp, CpuExpFp32KernelCreator)
}  // namespace mindspore::kernel
@@ -0,0 +1,49 @@
/**
 * Copyright 2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#ifndef MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_
#define MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_

#include <vector>
#include "src/lite_kernel.h"
#include "nnacl/fp32/exp.h"

namespace mindspore::kernel {
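// LiteKernel wrapper around the nnacl Exp routine: Init()/ReSize() precompute the in/out scales,
// and Run() binds the tensor buffers and splits the work across thread_count_ tasks via ParallelLaunch.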
class ExpCPUKernel : public LiteKernel {
 public:
  explicit ExpCPUKernel(OpParameter *parameter, const std::vector<lite::tensor::Tensor *> &inputs,
                        const std::vector<lite::tensor::Tensor *> &outputs, const lite::Context *ctx,
                        const mindspore::lite::PrimitiveC *primitive)
      : LiteKernel(parameter, inputs, outputs, ctx, primitive), ctx_(ctx), thread_count_(ctx->thread_num_) {}
  ~ExpCPUKernel() override = default;

  int Init() override;
  int ReSize() override;
  int Run() override;
  int DoExcute(int task_id);

 protected:
  const lite::Context *ctx_;
  int thread_count_;
  ExpParameter *exp_parameter_;

 private:
  float *input_addr_;
  float *output_addr_;
};
}  // namespace mindspore::kernel

#endif  // MINDSPORE_LITE_SRC_RUNTIME_KERNEL_ARM_FP32_EXP_H_