
eval.py 1.9 kB

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Evaluate a trained checkpoint on the eval dataset."""
from mindspore import nn
from mindspore.train.model import Model
from mindspore.common import dtype as mstype
from src.dataset import create_dataset
from src.config import set_config
from src.args import eval_parse_args
from src.models import define_net, load_ckpt
from src.utils import switch_precision, set_context

if __name__ == '__main__':
    # Parse evaluation arguments, build the config and set the MindSpore context.
    args_opt = eval_parse_args()
    config = set_config(args_opt)
    set_context(config)

    # Build the network, load the pretrained checkpoint and switch precision as configured.
    backbone_net, head_net, net = define_net(config, args_opt.is_training)
    load_ckpt(net, args_opt.pretrain_ckpt)
    switch_precision(net, mstype.float16, config)

    # Create the eval dataset; fail early if it yields no batches.
    dataset = create_dataset(dataset_path=args_opt.dataset_path, do_train=False, config=config)
    step_size = dataset.get_dataset_size()
    if step_size == 0:
        raise ValueError("The step_size of the dataset is zero. Check that the eval dataset "
                         "contains at least batch_size images (see config.py).")

    # Run evaluation with cross-entropy loss and report accuracy.
    net.set_train(False)
    loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    model = Model(net, loss_fn=loss, metrics={'acc'})
    res = model.eval(dataset)
    print(f"result:{res}\npretrain_ckpt={args_opt.pretrain_ckpt}")