diff --git a/mindspore/nn/layer/activation.py b/mindspore/nn/layer/activation.py
index fe98ca296a..0c4101e88b 100644
--- a/mindspore/nn/layer/activation.py
+++ b/mindspore/nn/layer/activation.py
@@ -249,9 +249,9 @@ class LeakyReLU(Cell):
         self.alpha = alpha

     def construct(self, x):
-        alpha = P.Cast()(F.scalar_to_array(self.alpha), P.DType()(x))
-        if alpha <= 1:
-            out = P.Maximum()(alpha * x, x)
+        alpha_array = P.Cast()(F.scalar_to_array(self.alpha), P.DType()(x))
+        if self.alpha <= 1:
+            out = P.Maximum()(alpha_array * x, x)
         else:
-            out = P.Minimum()(alpha * x, x)
+            out = P.Minimum()(alpha_array * x, x)
         return out