diff --git a/mindspore/nn/optim/ftrl.py b/mindspore/nn/optim/ftrl.py
index d193581d24..38e95f71cd 100644
--- a/mindspore/nn/optim/ftrl.py
+++ b/mindspore/nn/optim/ftrl.py
@@ -195,7 +195,7 @@ class FTRL(Optimizer):
         if value == 'CPU':
             self.sparse_opt = P.FusedSparseFtrl(self.lr, self.l1, self.l2, self.lr_power, self.use_locking)
-            self.sparse_opt.add_prim_attr("primitive", "CPU")
+            self.sparse_opt.add_prim_attr("primitive_target", "CPU")
         else:
             self.sparse_opt = P.SparseApplyFtrl(self.lr, self.l1, self.l2, self.lr_power, self.use_locking)
diff --git a/mindspore/nn/optim/proximal_ada_grad.py b/mindspore/nn/optim/proximal_ada_grad.py
index 4529b201c5..edd1f6947b 100644
--- a/mindspore/nn/optim/proximal_ada_grad.py
+++ b/mindspore/nn/optim/proximal_ada_grad.py
@@ -163,7 +163,7 @@ class ProximalAdagrad(Optimizer):
             raise ValueError("The value must be 'CPU' or 'Ascend', but got value {}".format(value))
         if value == 'CPU':
-            self.sparse_opt = P.FusedSparseProximalAdagrad(self.use_locking).add_prim_attr("primitive", "CPU")
+            self.sparse_opt = P.FusedSparseProximalAdagrad(self.use_locking).add_prim_attr("primitive_target", "CPU")
         else:
             self.sparse_opt = P.SparseApplyProximalAdagrad(self.use_locking)