
code clean

tags/v1.2.0-rc1
lzk · 4 years ago
commit 3fbcd29bfd
9 changed files with 16 additions and 16 deletions
  1. mindspore/lite/nnacl/fp32/adder_fp32.c (+0, -1)
  2. mindspore/lite/nnacl/fp32/arithmetic_fp32.c (+2, -2)
  3. mindspore/lite/nnacl/fp32/gelu_fp32.c (+1, -1)
  4. mindspore/lite/nnacl/fp32/gelu_fp32.h (+1, -1)
  5. mindspore/lite/src/ops/partial.h (+2, -4)
  6. mindspore/lite/src/ops/switch.h (+2, -4)
  7. mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h (+1, -1)
  8. mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc (+5, -0)
  9. mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h (+2, -2)

mindspore/lite/nnacl/fp32/adder_fp32.c (+0, -1)

@@ -17,7 +17,6 @@
 #include "nnacl/fp32/adder_fp32.h"
 #include <string.h>
 #include <math.h>
-#include "nnacl/fp32/common_func_fp32.h"
 #include "nnacl/fp32/matmul_fp32.h"

 void Adder12x4(const float *a, const float *b, float *dst, const float *bias, ActType act_type, int deep, int row,


mindspore/lite/nnacl/fp32/arithmetic_fp32.c (+2, -2)

@@ -917,7 +917,7 @@ int ElementLogicalAnd(const float *in0, const float *in1, float *out, int size)
 int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) {
   int index = 0;
   for (; index < size; index++) {
-    out[index] = (int)((int)(in0[index]) & (int)(in1[index]));
+    out[index] = (int)((unsigned int)(in0[index]) & (unsigned int)(in1[index]));
   }
   return NNACL_OK;
 }
@@ -925,7 +925,7 @@ int ElementLogicalAndInt(const int *in0, const int *in1, int *out, int size) {
 int ElementLogicalAndBool(const bool *in0, const bool *in1, bool *out, int size) {
   int index = 0;
   for (; index < size; index++) {
-    out[index] = (bool)((bool)(in0[index]) & (bool)(in1[index]));
+    out[index] = (bool)((unsigned int)(in0[index]) & (unsigned int)(in1[index]));
   }
   return NNACL_OK;
 }
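The change above is a cast cleanup: the bitwise AND now runs on unsigned operands and the result is converted back to the destination type, which keeps the operation purely on bit patterns and quiets static-analysis warnings about bitwise operators on signed or bool values. A minimal standalone sketch of the same pattern, using a hypothetical helper name (not code from this commit):

    #include <cstdio>

    // Mirror of the ElementLogicalAndInt change: convert both operands to
    // unsigned int, AND the bit patterns, then store the result as int.
    static int and_as_unsigned(const int *in0, const int *in1, int *out, int size) {
      for (int i = 0; i < size; ++i) {
        out[i] = (int)((unsigned int)in0[i] & (unsigned int)in1[i]);
      }
      return 0;
    }

    int main() {
      int a[] = {1, 0, -1};
      int b[] = {1, 1, 2};
      int r[3] = {0};
      and_as_unsigned(a, b, r, 3);
      std::printf("%d %d %d\n", r[0], r[1], r[2]);  // prints: 1 0 2
      return 0;
    }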


mindspore/lite/nnacl/fp32/gelu_fp32.c (+1, -1)

@@ -20,7 +20,7 @@
 #include <math.h>
 #include "nnacl/errorcode.h"

-int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) {
+int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param) {
   if (src == NULL || out == NULL) {
     return NNACL_ERR;
   }


mindspore/lite/nnacl/fp32/gelu_fp32.h (+1, -1)

@@ -23,7 +23,7 @@
 extern "C" {
 #endif

-int DoGeLU(float *src, float *out, int64_t real_dst_count, const GeLUParameter *param);
+int DoGeLU(const float *src, float *out, int64_t real_dst_count, const GeLUParameter *param);
 #ifdef __cplusplus
 }
 #endif
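Both DoGeLU hunks, the definition in gelu_fp32.c above and this declaration, make the same change: the input pointer becomes const because the function only reads from it. That documents the contract in the signature and lets callers holding a const buffer pass it without a cast. A small sketch of the idea with a hypothetical function (not the real GeLU math):

    #include <cstdio>

    // 'src' is read-only, so the signature says so with const.
    static int scale_into(const float *src, float *out, int n, float scale) {
      if (src == nullptr || out == nullptr) {
        return -1;  // same spirit as the NULL checks in the nnacl kernels
      }
      for (int i = 0; i < n; ++i) {
        out[i] = src[i] * scale;
      }
      return 0;
    }

    int main() {
      const float in[3] = {1.0f, 2.0f, 3.0f};  // a const buffer can be passed directly
      float out[3] = {0.0f};
      scale_into(in, out, 3, 0.5f);
      std::printf("%.1f %.1f %.1f\n", out[0], out[1], out[2]);  // 0.5 1.0 1.5
      return 0;
    }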


mindspore/lite/src/ops/partial.h (+2, -4)

@@ -28,15 +28,13 @@ namespace mindspore {
 namespace lite {
 class Partial : public PrimitiveC {
  public:
+  Partial() = default;
+  ~Partial() = default;
 #ifdef PRIMITIVE_WRITEABLE
   MS_DECLARE_PARENT(Partial, PrimitiveC);
-  Partial() = default;
   explicit Partial(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
-
 #else
-  Partial() = default;
-
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
   int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override;


mindspore/lite/src/ops/switch.h (+2, -4)

@@ -28,15 +28,13 @@ namespace mindspore {
 namespace lite {
 class Switch : public PrimitiveC {
  public:
+  Switch() = default;
+  ~Switch() = default;
 #ifdef PRIMITIVE_WRITEABLE
   MS_DECLARE_PARENT(Switch, PrimitiveC);
-  Switch() = default;
   explicit Switch(schema::PrimitiveT *primitive) : PrimitiveC(primitive) {}
   int UnPackAttr(const Primitive &prim, const std::vector<AnfNodePtr> &inputs) override;
-
 #else
-  Switch() = default;
-
   int UnPackToFlatBuilder(const schema::Primitive *primitive, flatbuffers::FlatBufferBuilder *fbb) override;
 #endif
   int InferShape(std::vector<lite::Tensor *> inputs_, std::vector<lite::Tensor *> outputs_) override;
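partial.h and switch.h get the same treatment: the defaulted constructor is hoisted out of the #ifdef PRIMITIVE_WRITEABLE branches so it is declared exactly once in either build flavor, and a defaulted destructor is added next to it. A stripped-down sketch of the resulting shape, using a hypothetical class instead of the real PrimitiveC hierarchy:

    // Special members live outside the conditional block, so both build
    // configurations share one declaration and cannot drift apart.
    class ExamplePrimitive {
     public:
      ExamplePrimitive() = default;
      ~ExamplePrimitive() = default;
    #ifdef PRIMITIVE_WRITEABLE
      // API that only exists when the writeable schema is compiled in.
      void UnpackFromWriteableSchema() {}
    #else
      // API for the flatbuffer-only build.
      void UnpackFromFlatbuffer() {}
    #endif
    };

    int main() { ExamplePrimitive p; (void)p; return 0; }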


mindspore/lite/src/runtime/kernel/arm/fp32/upsample_fp32.h (+1, -1)

@@ -36,7 +36,7 @@ class UpsampleCPUKernel : public ResizeCPUKernel {
   int RunImpl(int task_id) override;

  private:
-  UpsampleParameter *param_;
+  UpsampleParameter *param_ = nullptr;
 };
 }  // namespace mindspore::kernel
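The only change here is an in-class initializer on the pointer member, so a freshly constructed kernel holds nullptr instead of an indeterminate value until setup attaches a real parameter, and early dereferences become checkable. A tiny sketch with hypothetical types:

    struct ParamSketch { int new_height = 0; };  // stand-in for UpsampleParameter

    class KernelSketch {
     public:
      bool ready() const { return param_ != nullptr; }  // safe to test before setup
     private:
      ParamSketch *param_ = nullptr;  // defined initial state
    };

    int main() { return KernelSketch().ready() ? 1 : 0; }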




mindspore/lite/src/runtime/kernel/arm/int8/layer_norm_int8.cc (+5, -0)

@@ -19,6 +19,7 @@

 using mindspore::lite::KernelRegistrar;
 using mindspore::lite::RET_ERROR;
+using mindspore::lite::RET_NULL_PTR;
 using mindspore::lite::RET_OK;
 using mindspore::schema::PrimitiveType_LayerNorm;

@@ -90,6 +91,10 @@ int LayerNormInt8CPUKernel::ReSize() {
     op_parameter_ = nullptr;
   }
   op_parameter_ = PopulateLayerNormParameter(primitive_);
+  if (op_parameter_ == nullptr) {
+    MS_LOG(ERROR) << "op_parameter_ is nullptr!";
+    return RET_NULL_PTR;
+  }
   op_parameter_->thread_num_ = context_->thread_num_;
   param_ = reinterpret_cast<LayerNormParameter *>(op_parameter_);
   auto shape = in_tensors_.front()->shape();
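The added guard returns as soon as PopulateLayerNormParameter yields nullptr, instead of letting the very next line dereference op_parameter_. The shape of that pattern as a standalone sketch, with a hypothetical populate function and plain stderr output in place of MS_LOG and RET_NULL_PTR:

    #include <cstdio>

    struct ParamSketch { int thread_num = 0; };

    // Stand-in for a populate call that may fail and return nullptr.
    static ParamSketch *populate(bool fail) { return fail ? nullptr : new ParamSketch(); }

    static int resize(bool fail) {
      ParamSketch *param = populate(fail);
      if (param == nullptr) {                        // check before any member access
        std::fprintf(stderr, "param is nullptr!\n");
        return -1;                                   // analogous to returning RET_NULL_PTR
      }
      param->thread_num = 4;                         // only reached when populate succeeded
      delete param;
      return 0;
    }

    int main() {
      std::printf("%d %d\n", resize(false), resize(true));  // 0 -1
      return 0;
    }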


mindspore/lite/src/runtime/kernel/arm/int8/transpose_int8.h (+2, -2)

@@ -55,8 +55,8 @@ class TransposeInt8CPUKernel : public LiteKernel {
   int thread_h_stride_ = 0;
   int thread_h_num_ = 0;
   int num_unit_ = 0;
-  int in_shape_[8];
-  int out_shape_[8];
+  int in_shape_[8] = {0};
+  int out_shape_[8] = {0};
 };
 }  // namespace mindspore::kernel
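With = {0} the two shape arrays are aggregate-initialized: the listed element is set to zero and every remaining element is value-initialized, so all eight slots start at 0 instead of whatever the allocation happened to contain. A minimal sketch with a hypothetical holder class:

    #include <cstdio>

    class ShapeSketch {
     public:
      int last() const { return shape_[7]; }
     private:
      int shape_[8] = {0};  // every element starts at 0, not just the first
    };

    int main() {
      ShapeSketch s;
      std::printf("%d\n", s.last());  // 0
      return 0;
    }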



