Browse Source

1. Add adapters for the Mod, MaxPool3D and BCEWithLogitsLoss operators for GraphEngine.

2. Fix the backward (bprop) of MatrixInverse.
pull/14811/head
wangshuide2020 5 years ago
parent
commit
4e8bfc2862
8 changed files with 57 additions and 5 deletions
  1. +4
    -0
      mindspore/ccsrc/transform/graph_ir/op_adapter_map.h
  2. +6
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/elewise_calculation_ops_declare.cc
  3. +3
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/elewise_calculation_ops_declare.h
  4. +8
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/nn_norm_ops_declare.cc
  5. +3
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/nn_norm_ops_declare.h
  6. +21
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc
  7. +6
    -0
      mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.h
  8. +6
    -5
      mindspore/ops/_grad/grad_math_ops.py

+ 4
- 0
mindspore/ccsrc/transform/graph_ir/op_adapter_map.h View File

@@ -58,6 +58,8 @@ constexpr const char kNameEqual[] = "Equal";
constexpr const char kNameNotEqual[] = "NotEqual";
constexpr const char kNameFlattenGrad[] = "FlattenGrad";
constexpr const char kNameConvolution[] = "Convolution";
constexpr const char kNameMaxPool3D[] = "MaxPool3D";
constexpr const char kNameMaxPool3DGrad[] = "MaxPool3DGrad";
constexpr const char kNameBiasAdd[] = "BiasAdd";
constexpr const char kNameMaxPoolGrad[] = "MaxPoolGrad";
constexpr const char kNameRsqrtGrad[] = "RsqrtGrad";
@@ -101,6 +103,7 @@ constexpr const char kNameSmoothL1LossGrad[] = "SmoothL1LossGrad";
constexpr const char kNameSGD[] = "SGD";
constexpr const char kNameSigmoidCrossEntropyWithLogits[] = "SigmoidCrossEntropyWithLogits";
constexpr const char kNameSigmoidCrossEntropyWithLogitsGrad[] = "SigmoidCrossEntropyWithLogitsGrad";
constexpr const char kNameSigmoidCrossEntropyWithLogitsV2[] = "BCEWithLogitsLoss";
constexpr const char kNameScatterNdD[] = "ScatterNd";
constexpr const char kNamePadD[] = "Pad";
constexpr const char kNameMirrorPad[] = "MirrorPad";
@@ -132,6 +135,7 @@ constexpr const char kNameBitwiseXor[] = "BitwiseXor";
constexpr const char kNameCeil[] = "Ceil";
constexpr const char kNameCosineEmbeddingLoss[] = "CosineEmbeddingLoss";
constexpr const char kNameXdivy[] = "Xdivy";
constexpr const char kNameMod[] = "Mod";
constexpr const char kNameTile[] = "Tile";
constexpr const char kNameCos[] = "Cos";
constexpr const char kNameCosh[] = "Cosh";


+ 6
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/elewise_calculation_ops_declare.cc View File

@@ -185,6 +185,12 @@ ATTR_MAP(Xdivy) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Xdivy) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(Xdivy, kNameXdivy, ADPT_DESC(Xdivy))

// Mod
INPUT_MAP(Mod) = {{1, INPUT_DESC(x1)}, {2, INPUT_DESC(x2)}};
ATTR_MAP(Mod) = EMPTY_ATTR_MAP;
OUTPUT_MAP(Mod) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(Mod, kNameMod, ADPT_DESC(Mod))

// Exp
INPUT_MAP(Exp) = {{1, INPUT_DESC(x)}};
ATTR_MAP(Exp) = EMPTY_ATTR_MAP;


+ 3
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/elewise_calculation_ops_declare.h View File

@@ -105,6 +105,9 @@ DECLARE_OP_USE_OUTPUT(CosineEmbeddingLoss)
DECLARE_OP_ADAPTER(Xdivy)
DECLARE_OP_USE_OUTPUT(Xdivy)

DECLARE_OP_ADAPTER(Mod)
DECLARE_OP_USE_OUTPUT(Mod)

DECLARE_OP_ADAPTER(Cast)
DECLARE_OP_USE_INPUT_ATTR(Cast)
DECLARE_OP_USE_OUTPUT(Cast)


+ 8
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/nn_norm_ops_declare.cc View File

@@ -66,6 +66,14 @@ OUTPUT_MAP(SigmoidCrossEntropyWithLogitsGrad) = {{0, OUTPUT_DESC(gradient)}};
REG_ADPT_DESC(SigmoidCrossEntropyWithLogitsGrad, kNameSigmoidCrossEntropyWithLogitsGrad,
ADPT_DESC(SigmoidCrossEntropyWithLogitsGrad))

// SigmoidCrossEntropyWithLogitsV2
INPUT_MAP(SigmoidCrossEntropyWithLogitsV2) = {
{1, INPUT_DESC(predict)}, {2, INPUT_DESC(target)}, {3, INPUT_DESC(weight)}, {4, INPUT_DESC(pos_weight)}};
ATTR_MAP(SigmoidCrossEntropyWithLogitsV2) = {{"reduction", ATTR_DESC(reduction, AnyTraits<std::string>())}};
OUTPUT_MAP(SigmoidCrossEntropyWithLogitsV2) = {{0, OUTPUT_DESC(loss)}};
REG_ADPT_DESC(SigmoidCrossEntropyWithLogitsV2, kNameSigmoidCrossEntropyWithLogitsV2,
ADPT_DESC(SigmoidCrossEntropyWithLogitsV2))

// LogSoftmaxGrad
INPUT_MAP(LogSoftmaxGrad) = {{1, INPUT_DESC(x)}, {2, INPUT_DESC(grad)}};
ATTR_MAP(LogSoftmaxGrad) = {


+ 3
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/nn_norm_ops_declare.h View File

@@ -35,6 +35,9 @@ DECLARE_OP_USE_OUTPUT(SigmoidCrossEntropyWithLogits)
DECLARE_OP_ADAPTER(SigmoidCrossEntropyWithLogitsGrad)
DECLARE_OP_USE_OUTPUT(SigmoidCrossEntropyWithLogitsGrad)

DECLARE_OP_ADAPTER(SigmoidCrossEntropyWithLogitsV2)
DECLARE_OP_USE_OUTPUT(SigmoidCrossEntropyWithLogitsV2)

DECLARE_OP_ADAPTER(LogSoftmaxGrad)
DECLARE_OP_USE_OUTPUT(LogSoftmaxGrad)



+ 21
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.cc View File

@@ -27,6 +27,27 @@ ATTR_MAP(MaxPool) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyT
OUTPUT_MAP(MaxPool) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(MaxPool, kNameMaxPool, ADPT_DESC(MaxPool))

// MaxPool3D
INPUT_MAP(MaxPool3D) = {{1, INPUT_DESC(x)}};
ATTR_MAP(MaxPool3D) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"pad_mode", ATTR_DESC(padding, AnyTraits<std::string>())},
{"pad_list", ATTR_DESC(pads, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"dilation", ATTR_DESC(dilation, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"ceil_mode", ATTR_DESC(ceil_mode, AnyTraits<int64_t>())},
{"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
OUTPUT_MAP(MaxPool3D) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(MaxPool3D, kNameMaxPool3D, ADPT_DESC(MaxPool3D))

// MaxPool3DGrad
INPUT_MAP(MaxPool3DGrad) = {{1, INPUT_DESC(orig_x)}, {2, INPUT_DESC(orig_y)}, {3, INPUT_DESC(grads)}};
ATTR_MAP(MaxPool3DGrad) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"strides", ATTR_DESC(strides, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"pad_list", ATTR_DESC(pads, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},
{"format", ATTR_DESC(data_format, AnyTraits<std::string>())}};
OUTPUT_MAP(MaxPool3DGrad) = {{0, OUTPUT_DESC(y)}};
REG_ADPT_DESC(MaxPool3DGrad, kNameMaxPool3DGrad, ADPT_DESC(MaxPool3DGrad))

// AvgPool
INPUT_MAP(AvgPool) = {{1, INPUT_DESC(x)}};
ATTR_MAP(AvgPool) = {{"kernel_size", ATTR_DESC(ksize, AnyTraits<int64_t>(), AnyTraits<std::vector<int64_t>>())},


+ 6
- 0
mindspore/ccsrc/transform/graph_ir/op_declare/nn_pooling_ops_declare.h View File

@@ -35,6 +35,12 @@ DECLARE_OP_USE_OUTPUT(MaxPool)
DECLARE_OP_ADAPTER(MaxPoolGrad)
DECLARE_OP_USE_OUTPUT(MaxPoolGrad)

DECLARE_OP_ADAPTER(MaxPool3D)
DECLARE_OP_USE_OUTPUT(MaxPool3D)

DECLARE_OP_ADAPTER(MaxPool3DGrad)
DECLARE_OP_USE_OUTPUT(MaxPool3DGrad)

DECLARE_OP_ADAPTER(AvgPool)
DECLARE_OP_USE_OUTPUT(AvgPool)



+ 6
- 5
mindspore/ops/_grad/grad_math_ops.py View File

@@ -18,6 +18,7 @@
from functools import reduce
import numpy as np
import mindspore as ms
from mindspore import nn
from mindspore.ops import _selected_grad_ops as SG
from .. import functional as F
from .. import operations as P
@@ -178,15 +179,15 @@ def get_bprop_tensor_add(self):
@bprop_getters.register(P.MatrixInverse)
def get_bprop_matrix_inverse(self):
"""Grad definition for `MatrixInverse` operation."""
batchmatmul_a = P.math_ops.BatchMatMul(transpose_a=True)
batchmatmul_b = P.math_ops.BatchMatMul(transpose_b=True)
matmul_x1 = nn.MatMul(transpose_x1=True)
matmul_x2 = nn.MatMul(transpose_x2=True)
neg = P.Neg()

def bprop(x, out, dout):
dx = batchmatmul_b(dout, out)
dx = batchmatmul_a(out, dx)
dx = matmul_x2(dout, out)
dx = matmul_x1(out, dx)
dx = neg(dx)
return dx
return (dx,)

return bprop



Loading…
Cancel
Save