Browse Source

!4359 fix layernorm grad error in pynative mode

Merge pull request !4359 from chujinjin/fix_layernorm_grad_error_in_pynative
tags/v0.7.0-beta
mindspore-ci-bot Gitee 5 years ago
parent
commit
61a2274787
1 changed file with 1 addition and 1 deletion
  1. +1
    -1
      mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc

+ 1
- 1
mindspore/ccsrc/backend/optimizer/ascend/ascend_backend_optimization.cc View File

@@ -267,11 +267,11 @@ void AscendBackendIRFusionOptimization(const std::shared_ptr<session::KernelGrap
ir_fusion_pm->AddPass(std::make_shared<BnGradSplit>());
} else {
ir_fusion_pm->AddPass(std::make_shared<BatchNormGradSplit>());
ir_fusion_pm->AddPass(std::make_shared<LayerNormGradSplit>());
ir_fusion_pm->AddPass(std::make_shared<FusedBatchNormFusion>());
ir_fusion_pm->AddPass(std::make_shared<FusedBatchNormMixPrecisionFusion0>());
ir_fusion_pm->AddPass(std::make_shared<FusedBatchNormMixPrecisionFusion1>());
}
ir_fusion_pm->AddPass(std::make_shared<LayerNormGradSplit>());
ir_fusion_pm->AddPass(std::make_shared<InsertPadForNMSWithMask>());
AddAscendIRFusionRulesPass(ir_fusion_pm.get());
AddAscendIRFusionPass(ir_fusion_pm.get());


Loading…
Cancel
Save