
eval.py

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""evaluate imagenet"""
import argparse

import mindspore.nn as nn
from mindspore import context
from mindspore.train.model import Model
from mindspore.train.serialization import load_checkpoint, load_param_into_net

from src.config import efficientnet_b0_config_gpu as cfg
from src.dataset import create_dataset_val
from src.efficientnet import efficientnet_b0
from src.loss import LabelSmoothingCrossEntropy

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='image classification evaluation')
    parser.add_argument('--checkpoint', type=str, default='', help='checkpoint of efficientnet (Default: None)')
    parser.add_argument('--data_path', type=str, default='', help='Dataset path')
    parser.add_argument('--platform', type=str, default='GPU', choices=('Ascend', 'GPU'), help='run platform')
    args_opt = parser.parse_args()

    if args_opt.platform != 'GPU':
        raise ValueError("Only the GPU platform is supported.")

    context.set_context(mode=context.GRAPH_MODE, device_target=args_opt.platform)

    # Build EfficientNet-B0 with the GPU configuration.
    net = efficientnet_b0(num_classes=cfg.num_classes,
                          drop_rate=cfg.drop,
                          drop_connect_rate=cfg.drop_connect,
                          global_pool=cfg.gp,
                          bn_tf=cfg.bn_tf,
                          )

    # Load the trained parameters and switch the network to inference mode.
    ckpt = load_checkpoint(args_opt.checkpoint)
    load_param_into_net(net, ckpt)
    net.set_train(False)

    # Validation dataset and label-smoothing cross-entropy loss.
    val_data_url = args_opt.data_path
    dataset = create_dataset_val(cfg.batch_size, val_data_url, workers=cfg.workers, distributed=False)
    loss = LabelSmoothingCrossEntropy(smooth_factor=cfg.smoothing)

    # Evaluate loss, top-1 and top-5 accuracy on the validation set.
    eval_metrics = {'Loss': nn.Loss(),
                    'Top1-Acc': nn.Top1CategoricalAccuracy(),
                    'Top5-Acc': nn.Top5CategoricalAccuracy()}
    model = Model(net, loss, optimizer=None, metrics=eval_metrics)
    metrics = model.eval(dataset)
    print("metric: ", metrics)
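
A minimal invocation sketch, assuming the repository's src package is importable from the working directory and an ImageNet validation set is available locally; the checkpoint and dataset paths below are placeholders:

python eval.py --checkpoint ./efficientnet_b0.ckpt --data_path /path/to/imagenet/val --platform GPU

The script prints a dictionary containing the loss, top-1 accuracy and top-5 accuracy returned by model.eval.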