
!15376 fixed: ps (parameter server) mode does not support float16

From: @anancds
Reviewed-by: @limingqi107, @cristoval
Signed-off-by: @cristoval
pull/15376/MERGE
Committed-by: mindspore-ci-bot
commit 0fd6b0a7ce
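The "ps" in the title refers to MindSpore's parameter-server training mode. For context only, the sketch below shows roughly how ps mode is switched on before training; it assumes MindSpore 1.x APIs (context.set_ps_context, Cell.set_param_ps) and a scheduler/server/worker environment configured through MS_* environment variables. It is not code taken from this commit.

# Context-only sketch: enabling MindSpore parameter-server (ps) training.
# Assumes MindSpore 1.x and that the scheduler/server/worker roles are
# configured through environment variables such as MS_ROLE, MS_SCHED_HOST,
# MS_SCHED_PORT, MS_WORKER_NUM and MS_SERVER_NUM before the script is launched.
from mindspore import context


def enable_parameter_server(net):
    """Turn on ps mode and place the network's parameters on the servers."""
    context.set_ps_context(enable_ps=True)  # enable parameter-server training
    net.set_param_ps()                      # mark all of net's parameters as ps-hosted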
1 changed file with 4 additions and 2 deletions:

    model_zoo/official/cv/resnet/train.py (+4, -2)

@@ -207,12 +207,14 @@ if __name__ == '__main__':
     metrics = {"acc"}
     if args_opt.run_distribute:
         metrics = {'acc': DistAccuracy(batch_size=config.batch_size, device_num=args_opt.device_num)}
-    model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=metrics,
-                  amp_level="O2", keep_batchnorm_fp32=False, eval_network=dist_eval_network)
     if (args_opt.net != "resnet101" and args_opt.net != "resnet50") or \
         args_opt.parameter_server or target == "CPU":
+        ## fp32 training
         model = Model(net, loss_fn=loss, optimizer=opt, metrics=metrics, eval_network=dist_eval_network)
+    else:
+        model = Model(net, loss_fn=loss, optimizer=opt, loss_scale_manager=loss_scale, metrics=metrics,
+                      amp_level="O2", keep_batchnorm_fp32=False, eval_network=dist_eval_network)
 
     if cfg.optimizer == "Thor" and args_opt.dataset == "imagenet2012":
         from src.lr_generator import get_thor_damping
         damping = get_thor_damping(0, config.damping_init, config.damping_decay, 70, step_size)
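Why this fixes ps training: the removed lines constructed a float16 (amp_level="O2") Model unconditionally, before the fp32 fallback, so in parameter-server mode a float16-cast network was still built even though ps training does not support float16 parameters. After the change exactly one Model is constructed, and the "O2" mixed-precision settings are applied only when none of the fp32 conditions (other networks, ps mode, CPU target) hold. The snippet below is a minimal, self-contained sketch of that selection logic; the names use_fp32_training, select_model_config and ModelConfig are placeholders for illustration and are not part of train.py.

from dataclasses import dataclass


@dataclass
class ModelConfig:
    """Keyword arguments that would be forwarded to mindspore.Model."""
    amp_level: str = "O0"             # "O0" keeps float32; "O2" casts the network to float16
    keep_batchnorm_fp32: bool = True
    use_loss_scale: bool = False      # stands in for passing loss_scale_manager


def use_fp32_training(net_name, parameter_server, target):
    """Same predicate as the fixed train.py: fp32 for nets other than
    resnet50/resnet101, for parameter-server mode, and for the CPU target."""
    return (net_name not in ("resnet50", "resnet101")) or parameter_server or target == "CPU"


def select_model_config(net_name, parameter_server, target):
    """Build exactly one configuration, as the fixed train.py builds exactly one Model."""
    if use_fp32_training(net_name, parameter_server, target):
        return ModelConfig()  # plain fp32 training, which is what ps mode requires
    # float16 path, matching amp_level="O2", keep_batchnorm_fp32=False in train.py
    return ModelConfig(amp_level="O2", keep_batchnorm_fp32=False, use_loss_scale=True)


if __name__ == "__main__":
    # Parameter-server training now selects the fp32 configuration ...
    assert select_model_config("resnet50", parameter_server=True, target="Ascend").amp_level == "O0"
    # ... while plain resnet50 training on Ascend keeps the float16 ("O2") path.
    assert select_model_config("resnet50", parameter_server=False, target="Ascend").amp_level == "O2"
    print("model selection mirrors the fixed train.py logic")

Running the sketch passes both asserts, mirroring the decision the fixed script makes once, before calling Model(...).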

