Browse Source

!8953 Add support to get gradients of ops MaximumGrad, MinimumGrad and ReluGrad.

From: @yuan_shen_zhou
Reviewed-by: @liangchenghui,@c_34
Signed-off-by: @liangchenghui
tags/v1.1.0
mindspore-ci-bot Gitee 5 years ago
parent
commit
ea94eec978
1 changed files with 18 additions and 0 deletions
  1. +18
    -0
      mindspore/ops/_grad/grad_implementations.py

+ 18
- 0
mindspore/ops/_grad/grad_implementations.py View File

@@ -21,6 +21,24 @@ from .grad_base import bprops
# Unused parameters are placeholders.




@bprops.register("MaximumGrad")
def bprop_maximum_grad_grad(x, y, z, out, dout):
    """Backpropagator for primitive `MaximumGrad`.

    Second-order gradients of `MaximumGrad` are defined to be zero for
    every input, so each returned gradient matches the corresponding
    input's shape/dtype but is all zeros.
    """
    # One zero tensor per forward input; `out` and `dout` are placeholders.
    grad_x = F.zeros_like(x)
    grad_y = F.zeros_like(y)
    grad_z = F.zeros_like(z)
    return grad_x, grad_y, grad_z


@bprops.register("MinimumGrad")
def bprop_minimum_grad_grad(x, y, z, out, dout):
    """Backpropagator for primitive `MinimumGrad`.

    Second-order gradients of `MinimumGrad` are defined to be zero for
    every input, so each returned gradient matches the corresponding
    input's shape/dtype but is all zeros.
    """
    # One zero tensor per forward input; `out` and `dout` are placeholders.
    grad_x = F.zeros_like(x)
    grad_y = F.zeros_like(y)
    grad_z = F.zeros_like(z)
    return grad_x, grad_y, grad_z


@bprops.register("ReluGrad")
def bprop_relu_grad_grad(x, y, out, dout):
    """Backpropagator for primitive `ReluGrad`.

    Second-order gradients of `ReluGrad` are defined to be zero for both
    inputs, so each returned gradient matches the corresponding input's
    shape/dtype but is all zeros.
    """
    # One zero tensor per forward input; `out` and `dout` are placeholders.
    grad_x = F.zeros_like(x)
    grad_y = F.zeros_like(y)
    return grad_x, grad_y


@bprops.register("scalar_add") @bprops.register("scalar_add")
def bprop_scalar_add(x, y, out, dout): def bprop_scalar_add(x, y, out, dout):
"""Backpropagator for primitive `scalar_add`.""" """Backpropagator for primitive `scalar_add`."""


Loading…
Cancel
Save