diff --git a/mindspore/lite/src/ops/constant_of_shape.cc b/mindspore/lite/src/ops/constant_of_shape.cc
index d214d72aa7..ff4efebfde 100644
--- a/mindspore/lite/src/ops/constant_of_shape.cc
+++ b/mindspore/lite/src/ops/constant_of_shape.cc
@@ -14,10 +14,10 @@
  * limitations under the License.
  */

+#include "src/ops/constant_of_shape.h"
 #include "include/errorcode.h"
 #include "utils/log_adapter.h"
 #include "src/ir/tensor.h"
-#include "src/ops/constant_of_shape.h"

 namespace mindspore::lite {
 namespace {
@@ -25,9 +25,9 @@ constexpr int kShapeInputNum = 1;
 constexpr int kShapeOutputNum = 1;
 }  // namespace
 #ifdef PRIMITIVE_WRITEABLE
-int ConstantOfShape::GetValue() const { return this->primitive->value.AsConstantOfShape()->Value; }
+float ConstantOfShape::GetValue() const { return this->primitive->value.AsConstantOfShape()->value; }

-void ConstantOfShape::SetValue(float value) { this->primitive->value.AsConstantOfShape()->Value = value; }
+void ConstantOfShape::SetValue(float value) { this->primitive->value.AsConstantOfShape()->value = value; }

 #else
diff --git a/mindspore/lite/src/ops/conv2d.cc b/mindspore/lite/src/ops/conv2d.cc
index 467fe256d2..d0564a8f83 100644
--- a/mindspore/lite/src/ops/conv2d.cc
+++ b/mindspore/lite/src/ops/conv2d.cc
@@ -104,19 +104,18 @@ void Conv2D::SetActivationType(int activation_type) {}
 #endif
 void Conv2D::ConvInferShape(int input_h, int input_w, int *output_h, int *output_w) {
   MS_ASSERT(this->primitive != nullptr);
-  auto conv2DPrim = this->primitive->value_as_Conv2D();
-  int kernel_w = conv2DPrim->kernelW();
-  int kernel_h = conv2DPrim->kernelH();
-  int stride_w = conv2DPrim->strideW();
-  int stride_h = conv2DPrim->strideH();
-  int dilate_w = conv2DPrim->dilateW();
-  int dilate_h = conv2DPrim->dilateH();
-  pad_l_ = conv2DPrim->padLeft();
-  pad_u_ = conv2DPrim->padUp();
-  pad_d_ = conv2DPrim->padDown();
-  pad_r_ = conv2DPrim->padRight();
+  int kernel_w = GetKernelW();
+  int kernel_h = GetKernelH();
+  int stride_w = GetStrideW();
+  int stride_h = GetStrideH();
+  int dilate_w = GetDilateW();
+  int dilate_h = GetDilateH();
+  pad_l_ = GetPadLeft();
+  pad_u_ = GetPadUp();
+  pad_d_ = GetPadDown();
+  pad_r_ = GetPadRight();

-  if (conv2DPrim->padMode() == schema::PadMode_SAME) {
+  if (GetPadMode() == schema::PadMode_SAME) {
     *output_w = std::ceil(static_cast<float>(input_w) / static_cast<float>(stride_w));
     *output_h = std::ceil(static_cast<float>(input_h) / static_cast<float>(stride_h));
     auto pad_h_all = ((*output_h - 1) * stride_h + (kernel_h - 1) * dilate_h + 1 - input_h);
diff --git a/mindspore/lite/src/ops/depth_to_space.cc b/mindspore/lite/src/ops/depth_to_space.cc
index db24a90e3e..ec5ccf72c7 100644
--- a/mindspore/lite/src/ops/depth_to_space.cc
+++ b/mindspore/lite/src/ops/depth_to_space.cc
@@ -23,7 +23,7 @@ int DepthToSpace::GetBlockSize() const { return this->primitive->value.AsDepthToSpace()->blockSize; }
 int DepthToSpace::GetFormat() const { return this->primitive->value.AsDepthToSpace()->format; }

 void DepthToSpace::SetBlockSize(int block_size) { this->primitive->value.AsDepthToSpace()->blockSize = block_size; }
-void DepthToSpace::SetFormat(int format) { this->primitive->value.AsDepthToSpace()->format = format; }
+void DepthToSpace::SetFormat(int format) { this->primitive->value.AsDepthToSpace()->format = (schema::Format)format; }

 #else
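Note: the conv2d.cc hunk above swaps direct flatbuffer reads (`conv2DPrim->kernelW()`) for the primitive's own getters, leaving the SAME-padding arithmetic untouched. For reference, a minimal standalone sketch of that arithmetic (plain C++ with invented constants, not MindSpore code):

```cpp
#include <cmath>
#include <cstdio>

int main() {
  const int input_h = 224, kernel_h = 3, stride_h = 2, dilate_h = 1;
  // SAME padding: the output size depends only on input size and stride.
  const int output_h = static_cast<int>(std::ceil(static_cast<float>(input_h) / static_cast<float>(stride_h)));
  // Total padding required so the last window still fits over the input.
  const int pad_h_all = (output_h - 1) * stride_h + (kernel_h - 1) * dilate_h + 1 - input_h;
  const int pad_u = pad_h_all / 2;      // top gets the smaller half
  const int pad_d = pad_h_all - pad_u;  // bottom absorbs the odd remainder
  std::printf("output_h=%d pad_u=%d pad_d=%d\n", output_h, pad_u, pad_d);  // 112 0 1
  return 0;
}
```

Splitting the total as `pad_u = pad_h_all / 2` and `pad_d = pad_h_all - pad_u` matches the hunk's convention of giving the odd padding row to the bottom/right side.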
diff --git a/mindspore/lite/src/ops/fill.cc b/mindspore/lite/src/ops/fill.cc
index df6b126881..0e9e73a468 100644
--- a/mindspore/lite/src/ops/fill.cc
+++ b/mindspore/lite/src/ops/fill.cc
@@ -50,13 +50,12 @@ int Fill::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  auto fill_prim = this->primitive->value_as_Fill();
-  if (fill_prim == nullptr) {
-    MS_LOG(ERROR) << "Fill primitive is null!";
-    return RET_ERROR;
-  }
+
   std::vector<int> output_shape;
-  (void)output_shape.insert(output_shape.begin(), fill_prim->dims()->begin(), fill_prim->dims()->end());
+  for (int i = 0; i < GetDims().size(); i++) {
+    output_shape.push_back(GetDims()[i]);
+  }
+//  (void)output_shape.insert(output_shape.begin(), GetDims().begin(), GetDims().end());
   output->set_shape(output_shape);
   return RET_OK;
 }
diff --git a/mindspore/lite/src/ops/full_connection.cc b/mindspore/lite/src/ops/full_connection.cc
index 985fa76f0c..20b480fa19 100644
--- a/mindspore/lite/src/ops/full_connection.cc
+++ b/mindspore/lite/src/ops/full_connection.cc
@@ -22,13 +22,13 @@ namespace lite {
 bool FullConnection::GetHasBias() const { return this->primitive->value.AsFullConnection()->hasBias; }
 int FullConnection::GetAxis() const { return this->primitive->value.AsFullConnection()->axis; }
 bool FullConnection::GetUseAxis() const { return this->primitive->value.AsFullConnection()->useAxis; }
-int FullConnection::GetActivationType() const { return this->primitive->value.AsFullConnection()->activationType(); }
+int FullConnection::GetActivationType() const { return this->primitive->value.AsFullConnection()->activationType; }

 void FullConnection::SetHasBias(bool has_bias) { this->primitive->value.AsFullConnection()->hasBias = has_bias; }
 void FullConnection::SetAxis(int axis) { this->primitive->value.AsFullConnection()->axis = axis; }
 void FullConnection::SetUseAxis(bool use_axis) { this->primitive->value.AsFullConnection()->useAxis = use_axis; }
 void FullConnection::SetActivationType(int activationType) {
-  his->primitive->value.AsFullConnection()->activationType = (schema::ActivationType)activationType;
+  this->primitive->value.AsFullConnection()->activationType = (schema::ActivationType) activationType;
 }

 #else
diff --git a/mindspore/lite/src/ops/mul.cc b/mindspore/lite/src/ops/mul.cc
index 65205ecd57..c02baa7c0c 100644
--- a/mindspore/lite/src/ops/mul.cc
+++ b/mindspore/lite/src/ops/mul.cc
@@ -21,7 +21,9 @@ namespace lite {
 #ifdef PRIMITIVE_WRITEABLE
 int Mul::GetActivationType() const { return this->primitive->value.AsMul()->activationType; }

-void Mul::SetActivationType(int activation_type) { this->primitive->value.AsMul()->activationType = activation_type; }
+void Mul::SetActivationType(int activation_type) {
+  this->primitive->value.AsMul()->activationType = (schema::ActivationType) activation_type;
+}

 #else
diff --git a/mindspore/lite/src/ops/pad.cc b/mindspore/lite/src/ops/pad.cc
index 2d20028b69..1d7f0f24e1 100644
--- a/mindspore/lite/src/ops/pad.cc
+++ b/mindspore/lite/src/ops/pad.cc
@@ -24,7 +24,9 @@ int Pad::GetPaddingMode() const { return this->primitive->value.AsPad()->paddingMode; }
 float Pad::GetConstantValue() const { return this->primitive->value.AsPad()->constantValue; }

 void Pad::SetPaddings(const std::vector<int> &paddings) { this->primitive->value.AsPad()->paddings = paddings; }
-void Pad::SetPaddingMode(int padding_mode) { this->primitive->value.AsPad()->paddingMode = padding_mode; }
+void Pad::SetPaddingMode(int padding_mode) {
+  this->primitive->value.AsPad()->paddingMode = (schema::PaddingMode) padding_mode;
+}
 void Pad::SetConstantValue(float constant_value) { this->primitive->value.AsPad()->constantValue = constant_value; }

 #else
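Note: the casts added in depth_to_space.cc, mul.cc, and pad.cc (and below in pooling.cc, reshape.cc, slice.cc, and space_to_depth.cc) are required rather than cosmetic: the flatbuffer object API declares these fields with enum types, and C++ converts implicitly only in the enum-to-int direction. A minimal illustration (the enum and struct here are placeholders, not the real generated schema):

```cpp
enum ActivationType { NO_ACTIVATION = 0, RELU = 1 };  // placeholder values

struct MulT {
  ActivationType activationType = NO_ACTIVATION;  // enum-typed field, as in generated code
};

int main() {
  MulT mul;
  int raw = 1;  // the setter API hands the value around as a plain int
  // mul.activationType = raw;                           // error: no implicit int -> enum
  mul.activationType = static_cast<ActivationType>(raw);  // OK with an explicit cast
  int back = mul.activationType;                          // OK: enum -> int is implicit
  return back == RELU ? 0 : 1;
}
```

The patch spells the casts C-style, `(schema::ActivationType) activation_type`; `static_cast` as above is the stricter equivalent.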
diff --git a/mindspore/lite/src/ops/pooling.cc b/mindspore/lite/src/ops/pooling.cc
index 6ac40c9af2..ac08607d67 100644
--- a/mindspore/lite/src/ops/pooling.cc
+++ b/mindspore/lite/src/ops/pooling.cc
@@ -34,22 +34,22 @@ int Pooling::GetPadLeft() const { return this->primitive->value.AsPooling()->padLeft; }
 int Pooling::GetPadRight() const { return this->primitive->value.AsPooling()->padRight; }
 int Pooling::GetRoundMode() const { return this->primitive->value.AsPooling()->roundMode; }

-void Pooling::SetFormat(int format) { this->primitive->value.AsPooling()->format = (schema::Format)format; }
+void Pooling::SetFormat(int format) { this->primitive->value.AsPooling()->format = (schema::Format) format; }
 void Pooling::SetPoolingMode(int pooling_mode) {
-  this->primitive->value.AsPooling()->poolingMode = (schema::PoolMode)pooling_mode;
+  this->primitive->value.AsPooling()->poolingMode = (schema::PoolMode) pooling_mode;
 }
 void Pooling::SetGlobal(bool global) { this->primitive->value.AsPooling()->global = global; }
 void Pooling::SetWindowW(int window_w) { this->primitive->value.AsPooling()->windowW = window_w; }
 void Pooling::SetWindowH(int window_h) { this->primitive->value.AsPooling()->windowH = window_h; }
 void Pooling::SetStrideW(int stride_w) { this->primitive->value.AsPooling()->strideW = stride_w; }
 void Pooling::SetStrideH(int stride_h) { this->primitive->value.AsPooling()->strideH = stride_h; }
-void Pooling::SetPadMode(int pad_mode) { this->primitive->value.AsPooling()->padMode = (schema::PadMode)pad_mode; }
+void Pooling::SetPadMode(int pad_mode) { this->primitive->value.AsPooling()->padMode = (schema::PadMode) pad_mode; }
 void Pooling::SetPadUp(int pad_up) { this->primitive->value.AsPooling()->padUp = pad_up; }
 void Pooling::SetPadDown(int pad_down) { this->primitive->value.AsPooling()->padDown = pad_down; }
 void Pooling::SetPadLeft(int pad_left) { this->primitive->value.AsPooling()->padLeft = pad_left; }
 void Pooling::SetPadRight(int pad_right) { this->primitive->value.AsPooling()->padRight = pad_right; }
 void Pooling::SetRoundMode(int round_mode) {
-  this->primitive->value.AsPooling()->roundMode = (schema::RoundMode)round_mode;
+  this->primitive->value.AsPooling()->roundMode = (schema::RoundMode) round_mode;
 }

 #else
@@ -82,13 +82,13 @@ void Pooling::SetPadLeft(int pad_left) {}
 void Pooling::SetPadRight(int pad_right) {}
 void Pooling::SetRoundMode(int round_mode) {}

+#endif
+
 int Pooling::PadUp() const { return this->pad_u_; }
 int Pooling::PadDown() const { return this->pad_d_; }
 int Pooling::PadLeft() const { return this->pad_l_; }
 int Pooling::PadRight() const { return this->pad_r_; }

-#endif
-
 int Pooling::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
   MS_ASSERT(this->primitive != nullptr);
   auto input = inputs_.front();
@@ -102,37 +102,37 @@ int Pooling::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
   int input_h = input->shape().at(1);
   int input_w = input->shape().at(2);
-  auto pooling_prim = this->primitive->value_as_Pooling();
+
   MS_ASSERT(pooling_prim != nullptr);
-  auto window_h = pooling_prim->windowH();
-  auto window_w = pooling_prim->windowW();
-  if (pooling_prim->global()) {
+  auto window_h = GetWindowH();
+  auto window_w = GetWindowW();
+  if (GetGlobal()) {
     window_h = input_h;
     window_w = input_w;
   }
   int output_h = 0;
   int output_w = 0;
-  pad_l_ = pooling_prim->padLeft();
-  pad_u_ = pooling_prim->padUp();
-  pad_d_ = pooling_prim->padDown();
-  pad_r_ = pooling_prim->padRight();
-  if (pooling_prim->padMode() == schema::PadMode_SAME) {
-    output_w = std::ceil(static_cast<float>(input_w) / static_cast<float>(pooling_prim->strideW()));
-    output_h = std::ceil(static_cast<float>(input_h) / static_cast<float>(pooling_prim->strideH()));
-    auto pad_h_all = ((output_h - 1) * pooling_prim->strideH() + (window_h - 1) + 1 - input_h);
-    auto pad_w_all = ((output_w - 1) * pooling_prim->strideW() + (window_w - 1) + 1 - input_w);
+  pad_l_ = GetPadLeft();
+  pad_u_ = GetPadUp();
+  pad_d_ = GetPadDown();
+  pad_r_ = GetPadRight();
+  if (GetPadMode() == schema::PadMode_SAME) {
+    output_w = std::ceil(static_cast<float>(input_w) / static_cast<float>(GetStrideW()));
+    output_h = std::ceil(static_cast<float>(input_h) / static_cast<float>(GetStrideH()));
+    auto pad_h_all = ((output_h - 1) * GetStrideH() + (window_h - 1) + 1 - input_h);
+    auto pad_w_all = ((output_w - 1) * GetStrideW() + (window_w - 1) + 1 - input_w);
     pad_u_ = pad_h_all / 2;
     pad_d_ = pad_h_all - pad_u_;
     pad_l_ = pad_w_all / 2;
     pad_r_ = pad_w_all - pad_l_;
   } else {
-    auto round_mode = pooling_prim->roundMode();
+    auto round_mode = (schema::RoundMode) GetRoundMode();
     if (round_mode == schema::RoundMode_FLOOR) {
-      output_h = std::floor(static_cast<float>(input_h + pad_u_ + pad_d_ - window_h) / pooling_prim->strideH()) + 1;
-      output_w = std::floor(static_cast<float>(input_w + pad_l_ + pad_r_ - window_w) / pooling_prim->strideW()) + 1;
+      output_h = std::floor(static_cast<float>(input_h + pad_u_ + pad_d_ - window_h) / GetStrideH()) + 1;
+      output_w = std::floor(static_cast<float>(input_w + pad_l_ + pad_r_ - window_w) / GetStrideW()) + 1;
     } else if (round_mode == schema::RoundMode_CEIL) {
-      output_h = std::ceil(static_cast<float>(input_h + pad_u_ + pad_d_ - window_h) / pooling_prim->strideH()) + 1;
-      output_w = std::ceil(static_cast<float>(input_w + pad_l_ + pad_r_ - window_w) / pooling_prim->strideW()) + 1;
+      output_h = std::ceil(static_cast<float>(input_h + pad_u_ + pad_d_ - window_h) / GetStrideH()) + 1;
+      output_w = std::ceil(static_cast<float>(input_w + pad_l_ + pad_r_ - window_w) / GetStrideW()) + 1;
     } else {
       MS_LOG(ERROR) << "unsupported round mode.";
     }
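Note: besides respacing the casts, the pooling.cc hunk moves `#endif` up so the `PadUp()`/`PadDown()`/`PadLeft()`/`PadRight()` accessors are compiled in both the writeable and read-only builds, and it rewrites InferShape against the getters. The FLOOR/CEIL output-size rule it preserves, as a standalone sketch (invented sizes, not MindSpore code):

```cpp
#include <cmath>
#include <cstdio>

// Output length of one pooling axis: FLOOR drops a partial final window, CEIL keeps it.
int PoolOut(int in, int pad_a, int pad_b, int window, int stride, bool ceil_mode) {
  const float span = static_cast<float>(in + pad_a + pad_b - window);
  const float n = span / static_cast<float>(stride);
  return (ceil_mode ? static_cast<int>(std::ceil(n)) : static_cast<int>(std::floor(n))) + 1;
}

int main() {
  // 8 wide, 3-window, stride 2: windows at 0, 2, 4 fit fully; the one at 6 is partial.
  std::printf("floor=%d ceil=%d\n",
              PoolOut(8, 0, 0, 3, 2, false),   // 3
              PoolOut(8, 0, 0, 3, 2, true));   // 4
  return 0;
}
```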
diff --git a/mindspore/lite/src/ops/reshape.cc b/mindspore/lite/src/ops/reshape.cc
index fd136d0bbb..26ea1165c5 100644
--- a/mindspore/lite/src/ops/reshape.cc
+++ b/mindspore/lite/src/ops/reshape.cc
@@ -26,7 +26,7 @@ namespace lite {
 int Reshape::GetFormat() const { return this->primitive->value.AsReshape()->format; }
 std::vector<int64_t> Reshape::GetShape() const { return this->primitive->value.AsReshape()->shape; }

-void Reshape::SetFormat(int format) { this->primitive->value.AsReshape()->format = format; }
+void Reshape::SetFormat(int format) { this->primitive->value.AsReshape()->format = (schema::Format) format; }
 void Reshape::SetShape(const std::vector<int64_t> &shape) { this->primitive->value.AsReshape()->shape = shape; }

 #else
@@ -75,7 +75,7 @@ int Reshape::CalNewShape(const tensor::Tensor *in_tensor, std::vector<int> *out_shape) const {
   }
   return RET_OK;
 }
-template <typename T>
+template<typename T>
 void CalShape(const T *data, const std::vector<tensor::Tensor *> &inputs, std::vector<int> *out_shape, int shape_size) {
   int input_count = inputs[0]->ElementsNum();
   int index = 0;
@@ -103,7 +103,7 @@ int Reshape::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  auto reshape_prim = this->primitive->value_as_Reshape();
+
   MS_ASSERT(reshape_prim != nullptr);
   std::vector<int> out_shape;
   if (inputs_.size() == kDoubleNum) {
@@ -117,30 +117,38 @@ int Reshape::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
       case kNumberTypeInt8: {
         auto data = reinterpret_cast<int8_t *>(shape_tensor->Data());
         CalShape<int8_t>(data, inputs_, &out_shape, shape_size);
-      } break;
+      }
+        break;
       case kNumberTypeInt32: {
         auto data = reinterpret_cast<int32_t *>(shape_tensor->Data());
         CalShape<int32_t>(data, inputs_, &out_shape, shape_size);
-      } break;
+      }
+        break;
       case kNumberTypeInt64: {
         auto data = reinterpret_cast<int64_t *>(shape_tensor->Data());
         CalShape<int64_t>(data, inputs_, &out_shape, shape_size);
-      } break;
+      }
+        break;
       case kNumberTypeFloat: {
         auto data = reinterpret_cast<float *>(shape_tensor->Data());
         CalShape<float>(data, inputs_, &out_shape, shape_size);
-      } break;
+      }
+        break;
       case kNumberTypeUInt32: {
         auto data = reinterpret_cast<uint32_t *>(shape_tensor->Data());
         CalShape<uint32_t>(data, inputs_, &out_shape, shape_size);
-      } break;
+      }
+        break;
       default: {
         MS_LOG(ERROR) << "Reshape weight tensor has unsupported dataType: " << shape_tensor->data_type();
         return RET_INFER_ERR;
       }
     }
   } else if (inputs_.size() == kSingleNum) {
-    std::copy(reshape_prim->shape()->begin(), reshape_prim->shape()->end(), std::back_inserter(out_shape));
+    for (int i = 0; i < GetShape().size(); ++i) {
+      out_shape.push_back(GetShape()[i]);
+    }
+//    std::copy(GetShape().begin(), GetShape().end(), std::back_inserter(out_shape));
   } else {
     MS_LOG(ERROR) << "inputs tensor size invalid.";
     return RET_INFER_ERR;
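Note: a likely reason the reshape.cc hunk (and the fill.cc, tile.cc, and transpose.cc hunks) uses an index loop instead of the commented-out one-liner: `GetShape()` returns a `std::vector` by value, so `std::copy(GetShape().begin(), GetShape().end(), ...)` would take iterators from two different temporaries, which is undefined behavior. Sketch of the hazard and a safe form (the getter below is a stand-in, not the real API):

```cpp
#include <cstdint>
#include <vector>

std::vector<int64_t> GetShape() { return {1, 2, 3, 4}; }  // by-value getter, like the primitive's

int main() {
  std::vector<int> out_shape;
  // UB: begin() and end() would come from two distinct temporary vectors.
  // std::copy(GetShape().begin(), GetShape().end(), std::back_inserter(out_shape));

  // Safe: materialize one copy, then iterate it.
  const auto shape = GetShape();
  for (size_t i = 0; i < shape.size(); ++i) {
    out_shape.push_back(static_cast<int>(shape[i]));
  }
  return static_cast<int>(out_shape.size()) == 4 ? 0 : 1;
}
```

Binding the getter result once, as above, would also spare the patch's loops from re-copying the vector on every `GetShape().size()` and `GetShape()[i]` call; the loops as written are correct but do redundant work.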
diff --git a/mindspore/lite/src/ops/slice.cc b/mindspore/lite/src/ops/slice.cc
index a221a11eac..80f52d59f5 100644
--- a/mindspore/lite/src/ops/slice.cc
+++ b/mindspore/lite/src/ops/slice.cc
@@ -30,7 +30,7 @@ int SliceOp::GetFormat() const { return this->primitive->value.AsSlice()->format; }
 std::vector<int> SliceOp::GetBegin() const { return this->primitive->value.AsSlice()->begin; }
 std::vector<int> SliceOp::GetSize() const { return this->primitive->value.AsSlice()->size; }

-void SliceOp::SetFormat(int format) { this->primitive->value.AsSlice()->format = format; }
+void SliceOp::SetFormat(int format) { this->primitive->value.AsSlice()->format = (schema::Format)format; }
 void SliceOp::SetBegin(const std::vector<int> &begin) { this->primitive->value.AsSlice()->begin = begin; }
 void SliceOp::SetSize(const std::vector<int> &size) { this->primitive->value.AsSlice()->size = size; }
diff --git a/mindspore/lite/src/ops/space_to_depth.cc b/mindspore/lite/src/ops/space_to_depth.cc
index 261a2aa19b..8df6048163 100644
--- a/mindspore/lite/src/ops/space_to_depth.cc
+++ b/mindspore/lite/src/ops/space_to_depth.cc
@@ -24,7 +24,7 @@ int SpaceToDepth::GetBlockSize() const { return this->primitive->value.AsSpaceToDepth()->blockSize; }
 int SpaceToDepth::GetFormat() const { return this->primitive->value.AsSpaceToDepth()->format; }

 void SpaceToDepth::SetBlockSize(int block_size) { this->primitive->value.AsSpaceToDepth()->blockSize = block_size; }
-void SpaceToDepth::SetFormat(int format) { this->primitive->value.AsSpaceToDepth()->format = format; }
+void SpaceToDepth::SetFormat(int format) { this->primitive->value.AsSpaceToDepth()->format = (schema::Format)format; }

 #else
diff --git a/mindspore/lite/src/ops/split.cc b/mindspore/lite/src/ops/split.cc
index ab17e87b71..521a6ce736 100644
--- a/mindspore/lite/src/ops/split.cc
+++ b/mindspore/lite/src/ops/split.cc
@@ -50,7 +50,6 @@ int Split::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
   MS_ASSERT(this->primitive != nullptr);
   auto input = inputs_.front();
   MS_ASSERT(input != nullptr);
-  auto spilt_prim = this->primitive->value_as_Split();
   MS_ASSERT(spilt_prim != nullptr);
   if (inputs_.size() != kSplitInputNum) {
     MS_LOG(ERROR) << "inputs number is not equal to " << kSplitInputNum;
@@ -61,7 +60,7 @@ int Split::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  int number_split = spilt_prim->numberSplit();
+  int number_split = GetNumberSplit();
   if (static_cast<int>(outputs_.size()) != number_split) {
     MS_LOG(ERROR) << "outputs number is not equal to " << number_split;
     return RET_ERROR;
@@ -73,10 +72,12 @@ int Split::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  int split_dim = spilt_prim->splitDim();
+  int split_dim = GetSplitDim();
   std::vector<int> input_shape = input->shape();
   std::vector<int> size_split;
-  size_split.insert(size_split.begin(), spilt_prim->sizeSplits()->begin(), spilt_prim->sizeSplits()->end());
+  for (int i = 0; i < GetSizeSplits().size(); ++i) {
+    size_split.push_back(GetSizeSplits()[i]);
+  }
   for (int i = 0; i < number_split; ++i) {
     std::vector<int> output_shape;
     output_shape.insert(output_shape.begin(), input_shape.begin(), input_shape.end());
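Note: for context on the split.cc hunk, the shape rule that `number_split`, `split_dim`, and `size_split` feed further down: every output copies the input shape except along `split_dim`, which takes the matching `sizeSplits` entry. Standalone illustration (shapes invented):

```cpp
#include <cstdio>
#include <vector>

int main() {
  const std::vector<int> input_shape = {1, 6, 4};
  const std::vector<int> size_split = {2, 4};  // entries must sum to input_shape[split_dim]
  const int split_dim = 1;
  for (size_t i = 0; i < size_split.size(); ++i) {
    std::vector<int> output_shape = input_shape;  // copy, then override one axis
    output_shape[split_dim] = size_split[i];
    std::printf("output %zu: {%d, %d, %d}\n", i, output_shape[0], output_shape[1], output_shape[2]);
  }
  return 0;  // prints {1, 2, 4} and {1, 4, 4}
}
```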
diff --git a/mindspore/lite/src/ops/tile.cc b/mindspore/lite/src/ops/tile.cc
index 38cbe30991..427aafdb89 100644
--- a/mindspore/lite/src/ops/tile.cc
+++ b/mindspore/lite/src/ops/tile.cc
@@ -24,6 +24,10 @@ std::vector<int> Tile::GetMultiples() const { return this->primitive->value.AsTile()->multiples; }

 void Tile::SetMultiples(const std::vector<int> &multiples) { this->primitive->value.AsTile()->multiples = multiples; }

+std::vector<int> Tile::GetDims() const { return this->primitive->value.AsTile()->multiples; }
+
+void Tile::SetDims(const std::vector<int> &dims) { this->primitive->value.AsTile()->dims = dims; }
+
 #else

 std::vector<int> Tile::GetMultiples() const {
@@ -32,6 +36,13 @@ std::vector<int> Tile::GetMultiples() const {
 }

 void Tile::SetMultiples(const std::vector<int> &multiples) {}
+
+std::vector<int> Tile::GetDims() const {
+  auto fb_vector = this->primitive->value_as_Tile()->dims();
+  return std::vector<int>(fb_vector->begin(), fb_vector->end());
+}
+
+void Tile::SetDims(const std::vector<int> &dims) {}
 #endif

 int Tile::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
@@ -45,11 +56,14 @@ int Tile::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  auto tile_prim = this->primitive->value_as_Tile();
+
   MS_ASSERT(tile_prim != nullptr);
   std::vector<int> out_shape;
   std::vector<int> multiples;
-  std::copy(tile_prim->multiples()->begin(), tile_prim->multiples()->end(), std::back_inserter(multiples));
+  for (int i = 0; i < GetMultiples().size(); ++i) {
+    multiples.push_back(GetMultiples()[i]);
+  }
+//  std::copy(GetMultiples().begin(), GetMultiples().end(), std::back_inserter(multiples));
   for (size_t i = 0; i < input->shape().size(); ++i) {
     int tmp = input->shape()[i] * multiples[i];
     out_shape.push_back(tmp);
diff --git a/mindspore/lite/src/ops/tile.h b/mindspore/lite/src/ops/tile.h
index 8c43a0a04f..720b8baabd 100644
--- a/mindspore/lite/src/ops/tile.h
+++ b/mindspore/lite/src/ops/tile.h
@@ -37,6 +37,8 @@ class Tile : public PrimitiveC {
   int InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) override;
   std::vector<int> GetMultiples() const;
   void SetMultiples(const std::vector<int> &multiples);
+  std::vector<int> GetDims() const;
+  void SetDims(const std::vector<int> &dims);
 };
 }  // namespace lite
 }  // namespace mindspore
diff --git a/mindspore/lite/src/ops/transpose.cc b/mindspore/lite/src/ops/transpose.cc
index 67a223f4c9..2e858e8605 100644
--- a/mindspore/lite/src/ops/transpose.cc
+++ b/mindspore/lite/src/ops/transpose.cc
@@ -52,14 +52,17 @@ int Transpose::InferShape(std::vector<tensor::Tensor *> inputs_, std::vector<tensor::Tensor *> outputs_) {
-  auto transpore_prim = this->primitive->value_as_Transpose();
-  int conjugate = transpore_prim->conjugate();
+
+  int conjugate = GetConjugate();
   if (conjugate) {
     MS_LOG(ERROR) << "Transpose conjugate is not support currently";
     return RET_ERROR;
   }
   std::vector<int> perm;
-  perm.insert(perm.begin(), transpore_prim->perm()->begin(), transpore_prim->perm()->end());
+  for (int i = 0; i < GetPerm().size(); i++) {
+    perm.push_back(GetPerm()[i]);
+  }
+//  perm.insert(perm.begin(), GetPerm().begin(), GetPerm().end());
   std::vector<int> in_shape = input->shape();
   std::vector<int> out_shape;
   out_shape.resize(perm.size());
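Note: the transpose.cc hunk only changes how `perm` is collected; the downstream rule filling `out_shape` is that output dimension `i` takes input dimension `perm[i]`. Standalone illustration (shapes invented):

```cpp
#include <cassert>
#include <vector>

int main() {
  const std::vector<int> in_shape = {1, 224, 224, 3};  // NHWC
  const std::vector<int> perm = {0, 3, 1, 2};          // NHWC -> NCHW
  std::vector<int> out_shape(perm.size());
  for (size_t i = 0; i < perm.size(); ++i) {
    out_shape[i] = in_shape[perm[i]];  // pick the permuted input axis
  }
  assert(out_shape[0] == 1 && out_shape[1] == 3 && out_shape[2] == 224 && out_shape[3] == 224);
  return 0;
}
```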
diff --git a/mindspore/lite/src/populate_parameter.cc b/mindspore/lite/src/populate_parameter.cc
index 6a3d9a4a86..acd5a3dea5 100644
--- a/mindspore/lite/src/populate_parameter.cc
+++ b/mindspore/lite/src/populate_parameter.cc
@@ -988,7 +988,7 @@ OpParameter *PopulateSliceParameter(const mindspore::lite::PrimitiveC *primitive) {
   }
   slice_param->param_length_ = static_cast<int32_t>(param_begin.size());
   for (int32_t i = 0; i < slice_param->param_length_; ++i) {
-    slice_param->begin_[i] = param_begin[1];
+    slice_param->begin_[i] = param_begin[i];
     slice_param->size_[i] = param_size[i];
   }
   return reinterpret_cast<OpParameter *>(slice_param);
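Note: the one-character populate_parameter.cc fix is easy to miss but behavioral: with `param_begin[1]`, every axis's begin offset was populated from axis 1's value. A minimal before/after check (values invented):

```cpp
#include <cassert>
#include <vector>

int main() {
  const std::vector<int> param_begin = {0, 2, 5};
  int buggy[3], fixed[3];
  for (int i = 0; i < 3; ++i) {
    buggy[i] = param_begin[1];  // old code: every axis gets 2
    fixed[i] = param_begin[i];  // patched code: 0, 2, 5
  }
  assert(buggy[0] == 2 && buggy[2] == 2);
  assert(fixed[0] == 0 && fixed[2] == 5);
  return 0;
}
```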