From 75031bfc1d934e87d1f590f1013c2cbc9a5539c5 Mon Sep 17 00:00:00 2001 From: zhouyuanshen Date: Tue, 24 Nov 2020 16:34:39 +0800 Subject: [PATCH] add support to get gradients of ops maximumgrad, minimumgrad and relugrad --- mindspore/ops/_grad/grad_implementations.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/mindspore/ops/_grad/grad_implementations.py b/mindspore/ops/_grad/grad_implementations.py index 0ada82c847..2ba3fbd55f 100644 --- a/mindspore/ops/_grad/grad_implementations.py +++ b/mindspore/ops/_grad/grad_implementations.py @@ -21,6 +21,24 @@ from .grad_base import bprops # Unused parameters are placeholders. +@bprops.register("MaximumGrad") +def bprop_maximum_grad_grad(x, y, z, out, dout): + """Backpropagator for primitive `MaximumGrad`.""" + return F.zeros_like(x), F.zeros_like(y), F.zeros_like(z) + + +@bprops.register("MinimumGrad") +def bprop_minimum_grad_grad(x, y, z, out, dout): + """Backpropagator for primitive `MinimumGrad`.""" + return F.zeros_like(x), F.zeros_like(y), F.zeros_like(z) + + +@bprops.register("ReluGrad") +def bprop_relu_grad_grad(x, y, out, dout): + """Backpropagator for primitive `ReluGrad`.""" + return F.zeros_like(x), F.zeros_like(y) + + @bprops.register("scalar_add") def bprop_scalar_add(x, y, out, dout): """Backpropagator for primitive `scalar_add`."""