Browse Source

modify static check: replace unchecked memcpy with bounds-checked memcpy_s (with error handling) and add missing virtual destructors to QuantParamCalcer subclasses

tags/v1.1.0
lyvette 5 years ago
parent
commit
9ef4c1b9cc
4 changed files with 26 additions and 4 deletions
  1. +8
    -1
      mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc
  2. +8
    -2
      mindspore/lite/tools/converter/quantizer/aware_quantizer.cc
  3. +5
    -0
      mindspore/lite/tools/converter/quantizer/calc_quant_param.cc
  4. +5
    -1
      mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc

+ 8
- 1
mindspore/lite/tools/converter/legacy_optimizer/graph/tensor_quant_pass.cc View File

@@ -54,7 +54,14 @@ STATUS TensorQuantPass::Run(schema::MetaGraphT *graph) {
tensor->quantParams.emplace_back(weightQauntParam.release());
}
tensor->dataType = TypeId::kNumberTypeInt8;
::memcpy(tensor->data.data(), qDatas.data(), wShapeSize);
tensor->data.clear();
tensor->data.resize(wShapeSize * sizeof(int8_t));
auto ret =
memcpy_s(tensor->data.data(), wShapeSize * sizeof(int8_t), qDatas.data(), wShapeSize * sizeof(int8_t));
if (ret != EOK) {
MS_LOG(ERROR) << "memcpy_s failed: " << ret;
return RET_ERROR;
}
} else if (quantParam->dstDtype == TypeId::kNumberTypeInt32) {
// quant bias data
auto bShapeSize = GetShapeSize(*(tensor.get()));


+ 8
- 2
mindspore/lite/tools/converter/quantizer/aware_quantizer.cc View File

@@ -98,8 +98,14 @@ STATUS AwareQuantizer::DoQuantize() {
tensor->quantParams.clear();
tensor->quantParams.emplace_back(weightQauntParam.release());
}

::memcpy(tensor->data.data(), qDatas.data(), wShapeSize);
tensor->data.clear();
tensor->data.resize(wShapeSize * sizeof(int8_t));
auto ret =
memcpy_s(tensor->data.data(), wShapeSize * sizeof(int8_t), qDatas.data(), wShapeSize * sizeof(int8_t));
if (ret != EOK) {
MS_LOG(ERROR) << "memcpy_s failed: " << ret;
return RET_ERROR;
}
} else if (quantParam->dstDtype == TypeId::kNumberTypeInt32) {
// quant bias data
auto bShapeSize = GetShapeSize(*(tensor.get()));


+ 5
- 0
mindspore/lite/tools/converter/quantizer/calc_quant_param.cc View File

@@ -216,6 +216,7 @@ int LinearCalcer::Calc(MetaGraphT *graph, const CNodeT &node) {
class CalcConcat : public QuantParamCalcer {
public:
CalcConcat() = default;
~CalcConcat() override = default;

int Calc(MetaGraphT *graph, const CNodeT &node) override {
MS_ASSERT(node.outputIndex.size() == 1);
@@ -280,6 +281,7 @@ class CalcConcat : public QuantParamCalcer {
class CalcAdd : public QuantParamCalcer {
public:
CalcAdd() = default;
~CalcAdd() override = default;

int Calc(MetaGraphT *graph, const CNodeT &node) override {
MS_ASSERT(node.inputIndex.size() == 2);
@@ -357,6 +359,7 @@ class CalcAdd : public QuantParamCalcer {
class CalcRealDiv : public QuantParamCalcer {
public:
CalcRealDiv() = default;
~CalcRealDiv() override = default;

int Calc(MetaGraphT *graph, const CNodeT &node) override {
MS_ASSERT(node.inputIndex.size() == 2);
@@ -424,6 +427,7 @@ class CalcRealDiv : public QuantParamCalcer {
class CalcToSet : public QuantParamCalcer {
public:
CalcToSet(float min, float max) : min(min), max(max) {}
~CalcToSet() override = default;

int Calc(MetaGraphT *graph, const CNodeT &node) override {
MS_ASSERT(node.inputIndex.size() == 1);
@@ -468,6 +472,7 @@ class CalcToSet : public QuantParamCalcer {
class CalcActivation : public QuantParamCalcer {
public:
CalcActivation() = default;
~CalcActivation() override = default;

int Calc(MetaGraphT *subGraph, const CNodeT &node) override {
MS_ASSERT(node.inputIndex.size() == 1);


+ 5
- 1
mindspore/lite/tools/converter/quantizer/post_training_quantizer.cc View File

@@ -1323,7 +1323,11 @@ STATUS PostTrainingQuantizer::BiasCorrection(FuncGraphPtr func_graph) {
MS_LOG(ERROR) << "new char[] failed";
return RET_MEMORY_FAILED;
}
std::memcpy(tensor_data, bias_diff.data(), size);
ret = ::memcpy_s(tensor_data, size * sizeof(char), bias_diff.data(), size * sizeof(char));
if (ret != EOK) {
MS_LOG(ERROR) << "memcpy_s error: " << ret;
return false;
}
param_value->set_tensor_addr(tensor_data);
param_value->set_tensor_size(size);
parameter->set_default_param(param_value);


Loading…
Cancel
Save