|
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471 |
- /**
- * \file src/opr/impl/imgproc.cpp
- * MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
- *
- * Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or
- * implied.
- */
-
- #include "megbrain/opr/imgproc.h"
- #include "./internal/megdnn_opr_wrapper.inl"
- #include "megbrain/graph/grad_impl.h"
- #include "megbrain/opr/utility.h"
-
- using namespace mgb;
- using namespace opr;
-
- /* ======================= WarpPerspectiveForward ======================= */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspectiveForward);

/*!
 * \brief construct a WarpPerspectiveForward operator node
 *
 * Inputs are (src, mat[, mat_idx], out_shape); \p mat_idx may be nullptr, in
 * which case only three inputs are registered. The last input always carries
 * the target 2-d output shape and is consumed as a value via
 * outshape_by_symvar_enable().
 */
WarpPerspectiveForward::WarpPerspectiveForward(VarNode* src, VarNode* mat,
                                               VarNode* mat_idx,
                                               VarNode* out_shape,
                                               const Param& param,
                                               const OperatorNodeConfig& config)
        : Super(OperatorNodeBaseCtorParam{
                  src->owner_graph(), config, "warp_perspective", {src, mat}}) {
    init_megdnn_opr(*this, param);
    // mat_idx is optional; when given it is inserted between mat and out_shape
    if (mat_idx) {
        add_input({src, mat, mat_idx, out_shape});
    } else {
        add_input({src, mat, out_shape});
    }
    // the last input (out_shape) is read as a shape value, not as tensor data
    outshape_by_symvar_enable(input().size() - 1, input().size() - 1);
}
-
- SymbolVar WarpPerspectiveForward::make(SymbolVar i0, SymbolVar i1, SymbolVar i2,
- SymbolVar i3, const Param& param,
- const OperatorNodeConfig& config) {
- return i0.insert_single_output_opr<WarpPerspectiveForward>(
- i0.node(), i1.node(), i2.node(), i3.node(), param, config);
- }
-
- void WarpPerspectiveForward::init_output_dtype() {
- output(0)->dtype(input(0)->dtype());
- }
-
//! require contiguous layout on all inputs (delegated to the shared helper)
void WarpPerspectiveForward::add_input_layout_constraint() {
    mixin::megdnn_utils::add_input_layout_constraint_contig(*this);
}
-
/*!
 * \brief infer the output shape from the input shapes and the (h, w) value
 *        carried by the out_shape input
 * \param dest filled with the inferred output shape
 * \param shpinfo shapes of tensor inputs plus the value of the shape input
 */
void WarpPerspectiveForward::outshape_by_symvar_do_get_output_shape(
        TensorShape& dest, const ShapeInferInfo& shpinfo) {
    TensorShape oshp2d;
    // the out_shape input holds the target 2-d spatial shape (h, w)
    cg::copy_tensor_value_to_shape(oshp2d, *shpinfo.shpval_inp_val.at(0));
    auto imgshp = shpinfo.shape_inp_shp.at(0),
         matshp = shpinfo.shape_inp_shp.at(1);
    // img: 4-d, or 5-d for packed formats; mat: (batch, 3, 3)
    mgb_assert((imgshp.ndim == 4 || imgshp.ndim == 5) && matshp.ndim == 3 &&
               oshp2d.ndim == 2 && matshp.shape[1] == 3 &&
               matshp.shape[2] == 3,
               "shape mismatch for WarpPerspectiveForward: img=%s mat=%s "
               "out2d=%s",
               imgshp.to_string().c_str(), matshp.to_string().c_str(),
               oshp2d.to_string().c_str());
    if (input().size() == 3) {
        // no mat_idx: one matrix per source image
        mgb_assert(imgshp[0] == matshp[0],
                   "batchsize mismatch: img=%zu mat=%zu", imgshp[0], matshp[0]);
    } else {
        mgb_assert(input().size() == 4);
        // with mat_idx: a 1-d index vector, one entry per matrix
        auto mat_idx_shp = shpinfo.shape_inp_shp.at(2);
        mgb_assert(mat_idx_shp[0] == matshp[0] && mat_idx_shp.ndim == 1,
                   "invalid mat_idx shape: mat=%zu mat_idx=%s", matshp[0],
                   mat_idx_shp.to_string().c_str());
    }

    //! The index of height, e.g.,[b, h, w, c], the height_idx = 1
    size_t height_idx = 0;
    if (param().format == Param::Format::NCHW ||
        param().format == Param::Format::NCHW4) {
        height_idx = 2;
    } else {
        height_idx = 1;
    }

    dest = imgshp;
    // the output batch size follows the number of matrices
    dest[0] = matshp[0];
    if (param().format == Param::Format::NHWCD4) {
        // in NHWCD4 the width dim is two slots after the height dim
        dest.shape[height_idx] = oshp2d.shape[0];
        dest.shape[height_idx + 2] = oshp2d.shape[1];
    } else {
        for (int i = 0; i < 2; ++i)
            dest.shape[height_idx + i] = oshp2d.shape[i];
    }
}
-
void WarpPerspectiveForward::init_output_static_infer_desc() {
    Super::init_output_static_infer_desc();
    // workspace size depends only on static shapes (need_limit = false)
    init_output_static_infer_desc_workspace(false);
}
-
- void WarpPerspectiveForward::scn_do_execute() {
- if (input().size() == 3) {
- intl::_MegDNNOprMethInvoker<2, 1>::exec(megdnn_opr(), this);
- } else {
- intl::_MegDNNOprMethInvoker<3, 1>::exec(megdnn_opr(), this);
- }
- }
-
- size_t WarpPerspectiveForward::get_workspace_size_bytes(
- const TensorShapeArray& input_shapes,
- const TensorShapeArray& output_shapes) const {
- if (input().size() == 3) {
- return intl::_MegDNNOprMethInvoker<2, 1>::get_workspace_in_bytes(
- megdnn_opr(), this, input_shapes, output_shapes);
- } else {
- return intl::_MegDNNOprMethInvoker<3, 1>::get_workspace_in_bytes(
- megdnn_opr(), this, input_shapes, output_shapes);
- }
- }
-
//! keep the megdnn opr alive as an execution dependency
void WarpPerspectiveForward::record_execute_deps(ExecDependencyArray& deps) {
    record_megdnn_opr(deps);
}
-
#if MGB_ENABLE_GRAD
MGB_IMPL_OPR_GRAD(WarpPerspectiveForward) {
    // 4 inputs means the optional mat_idx input is present
    bool has_mat_idx = opr.input().size() == 4;
    mgb_assert(has_mat_idx || opr.input().size() == 3);
    switch (wrt_idx) {
        case 0: {
            // gradient w.r.t. the image data
            SymbolVar grad =
                    has_mat_idx ? WarpPerspectiveBackwardData::make(
                                          opr.input(1), opr.input(2),
                                          out_grad[0], opr.input(0),
                                          opr.param())
                                : WarpPerspectiveBackwardData::make(
                                          opr.input(1), out_grad[0],
                                          opr.input(0), opr.param());
            return grad.node();
        }
        case 1: {
            // gradient w.r.t. the transform matrix
            SymbolVar grad =
                    has_mat_idx ? WarpPerspectiveBackwardMat::make(
                                          opr.input(0), opr.input(1),
                                          opr.input(2), out_grad[0],
                                          opr.param())
                                : WarpPerspectiveBackwardMat::make(
                                          opr.input(0), opr.input(1),
                                          out_grad[0], opr.param());
            return grad.node();
        }
        default:
            // mat_idx / out_shape are not differentiable
            return InvalidGrad::make(opr, wrt_idx);
    }
}
#endif
-
- /* ====================== WarpPerspectiveBackwardData ====================== */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspectiveBackwardData);

/*!
 * \brief gradient w.r.t. the image input, without mat_idx; inputs are
 *        (mat, out_diff, in_for_shape)
 *
 * NOTE(review): the extra Super ctor args (2, false) presumably select
 * input 2 (in_for_shape) as the output-shape source -- confirm against the
 * Super declaration.
 */
WarpPerspectiveBackwardData::WarpPerspectiveBackwardData(
        VarNode* mat, VarNode* out_diff, VarNode* in_for_shape,
        const Param& param, const OperatorNodeConfig& config)
        : Super(OperatorNodeBaseCtorParam{mat->owner_graph(),
                                          config,
                                          "warp_perspective_bwd_data",
                                          {mat}},
                2, false) {
    init_megdnn_opr(*this, param);
    add_input({mat, out_diff, in_for_shape});
    intl::MegDNNOprInitPostCtor<WarpPerspectiveBackwardData>::apply(*this);
}
-
/*!
 * \brief gradient w.r.t. the image input, with mat_idx; inputs are
 *        (mat, mat_idx, out_diff, in_for_shape)
 *
 * NOTE(review): the extra Super ctor args (3, false) presumably select
 * input 3 (in_for_shape) as the output-shape source -- confirm against the
 * Super declaration.
 */
WarpPerspectiveBackwardData::WarpPerspectiveBackwardData(
        VarNode* mat, VarNode* mat_idx, VarNode* out_diff,
        VarNode* in_for_shape, const Param& param,
        const OperatorNodeConfig& config)
        : Super(OperatorNodeBaseCtorParam{mat->owner_graph(),
                                          config,
                                          "warp_perspective_bwd_data",
                                          {mat, mat_idx}},
                3, false) {
    init_megdnn_opr(*this, param);
    add_input({mat, mat_idx, out_diff, in_for_shape});
    intl::MegDNNOprInitPostCtor<WarpPerspectiveBackwardData>::apply(*this);
}
-
- SymbolVar WarpPerspectiveBackwardData::make(SymbolVar i0, SymbolVar i1,
- SymbolVar i2, const Param& param,
- const OperatorNodeConfig& config) {
- intl::MegDNNOprInitInputsModifier<WarpPerspectiveBackwardData>::apply(
- param, {&i0, &i1, &i2});
- return i0.insert_single_output_opr<WarpPerspectiveBackwardData>(
- i0.node(), i1.node(), i2.node(), param, config);
- }
-
- SymbolVar WarpPerspectiveBackwardData::make(SymbolVar i0, SymbolVar i1,
- SymbolVar i2, SymbolVar i3,
- const Param& param,
- const OperatorNodeConfig& config) {
- intl::MegDNNOprInitInputsModifier<WarpPerspectiveBackwardData>::apply(
- param, {&i0, &i1, &i2, &i3});
- return i0.insert_single_output_opr<WarpPerspectiveBackwardData>(
- i0.node(), i1.node(), i2.node(), i3.node(), param, config);
- }
-
- void WarpPerspectiveBackwardData::scn_do_execute() {
- if (input().size() == 3) {
- megdnn_opr()->exec(input(0)->dev_tensor().as_megdnn(),
- input(1)->dev_tensor().as_megdnn(),
- output(0)->dev_tensor().as_megdnn(),
- intl::get_megdnn_workspace_from_var(output(1)));
- } else {
- mgb_assert(input().size() == 4);
- megdnn_opr()->exec(input(0)->dev_tensor().as_megdnn(),
- input(1)->dev_tensor().as_megdnn(),
- input(2)->dev_tensor().as_megdnn(),
- output(0)->dev_tensor().as_megdnn(),
- intl::get_megdnn_workspace_from_var(output(1)));
- }
- }
-
- /* ====================== WarpPerspectiveBackwardMat ====================== */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpPerspectiveBackwardMat);

/*!
 * \brief gradient w.r.t. the transform matrix; inputs are
 *        (src, mat[, mat_idx], out_diff), \p mat_idx may be nullptr
 *
 * NOTE(review): the extra Super ctor args (1, true) presumably select
 * input 1 (mat) as the output-shape source -- confirm against the Super
 * declaration.
 */
WarpPerspectiveBackwardMat::WarpPerspectiveBackwardMat(
        VarNode* src, VarNode* mat, VarNode* mat_idx, VarNode* out_diff,
        const Param& param, const OperatorNodeConfig& config)
        : Super(OperatorNodeBaseCtorParam{src->owner_graph(),
                                          config,
                                          "warp_perspective_bwd_mat",
                                          {src, mat, mat_idx}},
                1, true) {
    init_megdnn_opr(*this, param);
    // mat_idx is optional, mirroring the forward opr
    if (mat_idx) {
        add_input({src, mat, mat_idx, out_diff});
    } else {
        add_input({src, mat, out_diff});
    }
    intl::MegDNNOprInitPostCtor<WarpPerspectiveBackwardMat>::apply(*this);
}
-
- void WarpPerspectiveBackwardMat::scn_do_execute() {
- if (input().size() == 3) {
- megdnn_opr()->exec(input(0)->dev_tensor().as_megdnn(),
- input(1)->dev_tensor().as_megdnn(),
- input(2)->dev_tensor().as_megdnn(),
- output(0)->dev_tensor().as_megdnn(),
- intl::get_megdnn_workspace_from_var(output(1)));
- } else {
- mgb_assert(input().size() == 4);
- megdnn_opr()->exec(input(0)->dev_tensor().as_megdnn(),
- input(1)->dev_tensor().as_megdnn(),
- input(2)->dev_tensor().as_megdnn(),
- input(3)->dev_tensor().as_megdnn(),
- output(0)->dev_tensor().as_megdnn(),
- intl::get_megdnn_workspace_from_var(output(1)));
- }
- }
-
- SymbolVar WarpPerspectiveBackwardMat::make(
- SymbolVar i0, SymbolVar i1, SymbolVar i2, SymbolVar i3,
- const Param& param, const OperatorNodeConfig& config) {
- intl::MegDNNOprInitInputsModifier<WarpPerspectiveBackwardMat>::apply(
- param, {&i0, &i1, &i2, &i3});
- return i0.insert_single_output_opr<WarpPerspectiveBackwardMat>(
- i0.node(), i1.node(), i2.node(), i3.node(), param, config);
- }
-
- /* ====================== Cv operator ====================== */
-
// simple single-input cv oprs: ctor/make are generated by MEGDNN_OPR_INIT1
MGB_DYN_TYPE_OBJ_FINAL_IMPL(RotateForward);
MEGDNN_OPR_INIT1(RotateForward, "rotate")

MGB_DYN_TYPE_OBJ_FINAL_IMPL(CvtColorForward);
MEGDNN_OPR_INIT1(CvtColorForward, "cvt_color")

MGB_DYN_TYPE_OBJ_FINAL_IMPL(GaussianBlurForward);
// NOTE(review): "gaussion_blur" is misspelled ("gaussian"); the string is the
// operator name and may be referenced by serialized graphs -- confirm before
// renaming.
MEGDNN_OPR_INIT1(GaussianBlurForward, "gaussion_blur")
-
- /* ======================= ResizeForward ======================= */
MGB_DYN_TYPE_OBJ_FINAL_IMPL(ResizeForward);
// two inputs: (src, out_shape); ctor/make generated by MEGDNN_OPR_INIT2
MEGDNN_OPR_INIT2(ResizeForward, "resize")
-
void ResizeForward::init_output_dtype() {
    // output keeps the image input dtype
    output(0)->dtype(input(0)->dtype());
    // input 1 carries the target (h, w); consume it as a shape value
    outshape_by_symvar_enable(1, 1);
}
-
- void ResizeForward::add_input_layout_constraint() {
- if (param().format != Param::Format::NCHW) {
- input(0)->add_layout_constraint_contiguous();
- }
- input(1)->add_layout_constraint_contiguous();
- }
-
/*!
 * \brief infer the output shape from the image shape and the (h, w) value
 *        carried by the shape input
 * \param dest filled with the inferred output shape
 * \param shpinfo shapes of tensor inputs plus the value of the shape input
 */
void ResizeForward::outshape_by_symvar_do_get_output_shape(
        TensorShape& dest, const ShapeInferInfo& shpinfo) {
    TensorShape oshp2d;
    // the shape input holds the target 2-d spatial shape (h, w)
    cg::copy_tensor_value_to_shape(oshp2d, *shpinfo.shpval_inp_val.at(0));
    auto imgshp = shpinfo.shape_inp_shp.at(0);
    // img: 4-d, or 5-d for packed formats
    mgb_assert((imgshp.ndim == 4 || imgshp.ndim == 5) && oshp2d.ndim == 2,
               "shape mismatch for ResizeForward: img=%s out2d=%s",
               imgshp.to_string().c_str(), oshp2d.to_string().c_str());

    //! The index of height, e.g.,[b, h, w, c], the height_idx = 1
    size_t height_idx = 0;
    if (param().format == Param::Format::NCHW ||
        param().format == Param::Format::NCHW4) {
        height_idx = 2;
    } else {
        height_idx = 1;
    }

    dest = imgshp;
    if (param().format == Param::Format::NHWCD4) {
        // in NHWCD4 the width dim is two slots after the height dim
        dest.shape[height_idx] = oshp2d.shape[0];
        dest.shape[height_idx + 2] = oshp2d.shape[1];
    } else {
        for (int i = 0; i < 2; ++i)
            dest.shape[height_idx + i] = oshp2d.shape[i];
    }
}
-
void ResizeForward::init_output_static_infer_desc() {
    Super::init_output_static_infer_desc();
    // workspace size depends only on static shapes (need_limit = false)
    init_output_static_infer_desc_workspace(false);
}
-
//! forward to the megdnn Resize kernel via the generic invoker
void ResizeForward::scn_do_execute() {
    intl::MegDNNOprMethInvoker<megdnn::Resize>::exec(megdnn_opr(), this);
}
-
//! query the megdnn kernel for its workspace requirement
size_t ResizeForward::get_workspace_size_bytes(
        const TensorShapeArray& input_shapes,
        const TensorShapeArray& output_shapes) const {
    return intl::MegDNNOprMethInvoker<megdnn::Resize>::get_workspace_in_bytes(
            megdnn_opr(), this, input_shapes, output_shapes);
}
-
//! keep the megdnn opr alive as an execution dependency
void ResizeForward::record_execute_deps(ExecDependencyArray& deps) {
    record_megdnn_opr(deps);
}
-
#if MGB_ENABLE_GRAD
MGB_IMPL_OPR_GRAD(ResizeForward) {
    mgb_assert(opr.input().size() == 2);
    // only the image input (idx 0) is differentiable; the shape input is not
    if (wrt_idx != 0)
        return InvalidGrad::make(opr, wrt_idx);
    SymbolVar grad =
            ResizeBackward::make(out_grad[0], opr.input(0), opr.param());
    return grad.node();
}
#endif
-
- /* ====================== ResizeBackward ====================== */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(ResizeBackward);
// ctor/make generated by MEGDNN_OPR_INIT2 with extra Super ctor args (1, false)
MEGDNN_OPR_INIT2(ResizeBackward, "resize_bwd", 1, false);
-
- /* ======================= WarpAffineForward ======================= */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(WarpAffineForward);
// three inputs: (src, mat, out_shape); ctor/make generated by MEGDNN_OPR_INIT3
MEGDNN_OPR_INIT3(WarpAffineForward, "warp_affine")
-
void WarpAffineForward::init_output_dtype() {
    // output keeps the image input dtype
    output(0)->dtype(input(0)->dtype());
    // input 2 carries the target (h, w); consume it as a shape value
    outshape_by_symvar_enable(2, 2);
}
-
//! require contiguous layout on all inputs (delegated to the shared helper)
void WarpAffineForward::add_input_layout_constraint() {
    mixin::megdnn_utils::add_input_layout_constraint_contig(*this);
}
-
/*!
 * \brief infer the output shape from the input shapes and the (h, w) value
 *        carried by the out_shape input
 * \param dest filled with the inferred output shape
 * \param shpinfo shapes of tensor inputs plus the value of the shape input
 */
void WarpAffineForward::outshape_by_symvar_do_get_output_shape(
        TensorShape& dest, const ShapeInferInfo& shpinfo) {
    TensorShape oshp2d;
    // the out_shape input holds the target 2-d spatial shape (h, w)
    cg::copy_tensor_value_to_shape(oshp2d, *shpinfo.shpval_inp_val.at(0));
    auto imgshp = shpinfo.shape_inp_shp.at(0),
         matshp = shpinfo.shape_inp_shp.at(1);
    // affine mat is (batch, 2, 3), with batch equal to the image batch
    mgb_assert((imgshp.ndim == 4 || imgshp.ndim == 5) && matshp.ndim == 3 &&
               oshp2d.ndim == 2 && matshp.shape[0] == imgshp.shape[0] &&
               matshp.shape[1] == 2 && matshp.shape[2] == 3,
               "shape mismatch for WarpAffineForward: img=%s mat=%s out2d=%s",
               imgshp.to_string().c_str(), matshp.to_string().c_str(),
               oshp2d.to_string().c_str());

    // NOTE(review): unlike WarpPerspective/Resize, NCHW4 is not mapped to
    // height_idx = 2 here -- confirm whether WarpAffine supports NCHW4.
    size_t height_idx = 0;
    if (param().format == Param::Format::NCHW) {
        height_idx = 2;
    } else {
        height_idx = 1;
    }

    dest = imgshp;
    if (param().format == Param::Format::NHWCD4) {
        // in NHWCD4 the width dim is two slots after the height dim
        dest.shape[height_idx] = oshp2d.shape[0];
        dest.shape[height_idx + 2] = oshp2d.shape[1];
    } else {
        for (int i = 0; i < 2; ++i)
            dest.shape[height_idx + i] = oshp2d.shape[i];
    }
}
-
void WarpAffineForward::init_output_static_infer_desc() {
    Super::init_output_static_infer_desc();
    // workspace size depends only on static shapes (need_limit = false)
    init_output_static_infer_desc_workspace(false);
}
-
//! forward to the megdnn WarpAffine kernel via the generic invoker
void WarpAffineForward::scn_do_execute() {
    intl::MegDNNOprMethInvoker<megdnn::WarpAffine>::exec(megdnn_opr(), this);
}
-
//! query the megdnn kernel for its workspace requirement
size_t WarpAffineForward::get_workspace_size_bytes(
        const TensorShapeArray& input_shapes,
        const TensorShapeArray& output_shapes) const {
    return intl::MegDNNOprMethInvoker<
            megdnn::WarpAffine>::get_workspace_in_bytes(megdnn_opr(), this,
                                                        input_shapes,
                                                        output_shapes);
}
-
//! keep the megdnn opr alive as an execution dependency
void WarpAffineForward::record_execute_deps(ExecDependencyArray& deps) {
    record_megdnn_opr(deps);
}
-
- /* ======================= RemapForward ======================= */
-
MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemapForward);
// two inputs: (src, map); ctor/make generated by MEGDNN_OPR_INIT2
MEGDNN_OPR_INIT2(RemapForward, "remap")

void RemapForward::init_output_dtype() {
    // output keeps the image input dtype
    output(0)->dtype(input(0)->dtype());
}
-
#if MGB_ENABLE_GRAD
MGB_IMPL_OPR_GRAD(RemapForward) {
    mgb_assert(opr.input().size() == 2);
    switch (wrt_idx) {
        case 0: {
            // gradient w.r.t. the source image
            SymbolVar grad = RemapBackwardData::make(
                    opr.input(1), out_grad[0], opr.input(0), opr.param());
            return grad.node();
        }
        case 1: {
            // gradient w.r.t. the sampling map
            SymbolVar grad = RemapBackwardMat::make(
                    opr.input(0), opr.input(1), out_grad[0], opr.param());
            return grad.node();
        }
        default:
            return InvalidGrad::make(opr, wrt_idx);
    }
}
#endif
-
- /* ====================== RemapBackward ====================== */
-
// ctor/make generated by MEGDNN_OPR_INIT3; the trailing args are forwarded as
// extra Super ctor args (shape-source index and a flag -- see the Super decl)
MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemapBackwardData);
MEGDNN_OPR_INIT3(RemapBackwardData, "remap_bwd_data", 2, false);
MGB_DYN_TYPE_OBJ_FINAL_IMPL(RemapBackwardMat);
MEGDNN_OPR_INIT3(RemapBackwardMat, "remap_bwd_mat", 1, true);
-
- // vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}}
|