diff --git a/mindspore/lite/src/runtime/kernel/arm/fp32/scale.cc b/mindspore/lite/src/runtime/kernel/arm/fp32/scale.cc
index 2d94d9200f..58161c8a47 100644
--- a/mindspore/lite/src/runtime/kernel/arm/fp32/scale.cc
+++ b/mindspore/lite/src/runtime/kernel/arm/fp32/scale.cc
@@ -192,41 +192,19 @@ kernel::LiteKernel *CpuScaleFp32KernelCreator(const std::vector
     MS_LOG(ERROR) << "opParameter is nullptr";
     return nullptr;
   }
-  auto *weight_tensor = inputs.at(kWeightIndex);
-  auto *restore_data = weight_tensor->MutableData();
-  if (weight_tensor->data_type() == kNumberTypeInt8 || primitive->GetQuantType() == schema::QuantType_WeightQuant) {
-    auto *dequant_weight = kernel::LiteKernelUtil::DequantWeight(weight_tensor);
-    if (dequant_weight == nullptr) {
-      MS_LOG(ERROR) << "dequant data is nullptr.";
-      return nullptr;
-    }
-    weight_tensor->SetData(dequant_weight);
-  }
+
   auto *kernel = new (std::nothrow) ScaleCPUKernel(opParameter, inputs, outputs, ctx, primitive);
   if (kernel == nullptr) {
     MS_LOG(ERROR) << "New kernel fails.";
-    if (weight_tensor->data_type() == kNumberTypeInt8 || primitive->GetQuantType() == schema::QuantType_WeightQuant) {
-      weight_tensor->FreeData();
-      weight_tensor->SetData(restore_data);
-    }
     return nullptr;
   }
-
   auto ret = kernel->Init();
   if (ret != RET_OK) {
     MS_LOG(ERROR) << "Init kernel failed, name: " << opParameter->name_ << ", type: "
                   << schema::EnumNamePrimitiveType(static_cast<schema::PrimitiveType>(opParameter->type_));
     delete kernel;
-    if (weight_tensor->data_type() == kNumberTypeInt8 || primitive->GetQuantType() == schema::QuantType_WeightQuant) {
-      weight_tensor->FreeData();
-      weight_tensor->SetData(restore_data);
-    }
     return nullptr;
   }
-  if (weight_tensor->data_type() == kNumberTypeInt8 || primitive->GetQuantType() == schema::QuantType_WeightQuant) {
-    weight_tensor->FreeData();
-    weight_tensor->SetData(restore_data);
-  }
   return kernel;
 }
 
diff --git a/mindspore/lite/test/run_benchmark_nets.sh b/mindspore/lite/test/run_benchmark_nets.sh
index 9880dfdcd3..d315be893e 100644
--- a/mindspore/lite/test/run_benchmark_nets.sh
+++ b/mindspore/lite/test/run_benchmark_nets.sh
@@ -617,17 +617,9 @@ basepath=$(pwd)
 echo ${basepath}
 
 #set -e
-# Example:sh run_benchmark_nets.sh -a /home/temp_test -c /home/temp_test -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408"
-while getopts "a:c:r:m:d:" opt; do
+# Example:sh run_benchmark_nets.sh -r /home/temp_test -m /home/temp_test/models -d "8KE5T19620002408"
+while getopts "r:m:d:" opt; do
     case ${opt} in
-        a)
-            arm_path=${OPTARG}
-            echo "arm_path is ${OPTARG}"
-            ;;
-        c)
-            converter_path=${OPTARG}
-            echo "converter_path is ${OPTARG}"
-            ;;
         r)
             release_path=${OPTARG}
             echo "release_path is ${OPTARG}"
@@ -646,9 +638,6 @@ while getopts "a:c:r:m:d:" opt; do
     esac
 done
 
-echo ${arm_path}
-echo ${converter_path}
-
 mkdir train
 arm64_path=${release_path}/android_aarch64
 mv ${arm64_path}/*runtime-*train* ./train
diff --git a/mindspore/lite/tools/converter/quantizer/quantize_util.cc b/mindspore/lite/tools/converter/quantizer/quantize_util.cc
index 6a0ea9b47b..498b47c8c5 100644
--- a/mindspore/lite/tools/converter/quantizer/quantize_util.cc
+++ b/mindspore/lite/tools/converter/quantizer/quantize_util.cc
@@ -33,10 +33,10 @@ namespace mindspore {
 namespace lite {
 namespace quant {
 const std::vector<schema::PrimitiveType> QuantStrategy::conv_types = {
-    schema::PrimitiveType_DeConv2D, schema::PrimitiveType_DeDepthwiseConv2D,
-    schema::PrimitiveType_Conv2D, schema::PrimitiveType_DepthwiseConv2D};
-const std::vector<schema::PrimitiveType> QuantStrategy::mul_types = {
-    schema::PrimitiveType_Mul, schema::PrimitiveType_MatMul, schema::PrimitiveType_FullConnection};
+    schema::PrimitiveType_DeConv2D, schema::PrimitiveType_DeDepthwiseConv2D, schema::PrimitiveType_Conv2D,
+    schema::PrimitiveType_DepthwiseConv2D};
+const std::vector<schema::PrimitiveType> QuantStrategy::mul_types = {schema::PrimitiveType_MatMul,
+                                                                     schema::PrimitiveType_FullConnection};
 
 QuantStrategy::QuantStrategy(size_t weightSize, size_t convWeightQuantChannelThreshold)
     : mWeightSize(weightSize), mConvWeightQuantChannelThreshold(convWeightQuantChannelThreshold) {}