
config.py 2.1 kB

# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Network config settings, used in train.py and eval.py.
"""
from easydict import EasyDict as edict

# config for dpn, imagenet-1K
config = edict()
# model config
config.image_size = (224, 224) # input image size
config.num_classes = 1000 # number of dataset classes
config.backbone = 'dpn92' # backbone network
config.is_save_on_master = True # save checkpoints only on the master (rank 0) device
# parallel config
config.num_parallel_workers = 4 # number of workers reading the data
config.rank = 0 # local rank in distributed training
config.group_size = 1 # group size in distributed training
# training config
config.batch_size = 32 # batch size
config.global_step = 0 # start step of the learning rate schedule
config.epoch_size = 180 # number of training epochs
config.loss_scale_num = 1024 # loss scale
# optimizer config
config.momentum = 0.9 # momentum (SGD)
config.weight_decay = 1e-4 # weight decay (SGD)
# learning rate config
config.lr_schedule = 'warmup' # learning rate schedule
config.lr_init = 0.01 # initial learning rate
config.lr_max = 0.1 # maximum learning rate
config.factor = 0.1 # factor by which the learning rate drops
config.epoch_number_to_drop = [5, 15] # learning rate will drop after these epochs
config.warmup_epochs = 5 # warmup epochs in the learning rate schedule
# dataset config
config.dataset = "imagenet-1K" # dataset name
config.label_smooth = False # whether to apply label smoothing
config.label_smooth_factor = 0.0 # label smoothing factor
# parameter save config
config.keep_checkpoint_max = 3 # keep only the last keep_checkpoint_max checkpoints
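
The learning-rate fields above describe a warmup-then-step-drop policy: the rate climbs from lr_init to lr_max over warmup_epochs, then is multiplied by factor after each epoch listed in epoch_number_to_drop. A minimal sketch of that policy, assuming a linear warmup (warmup_step_lr is an illustrative name; the repository's actual LR utility may compute the schedule differently):

from config import config

def warmup_step_lr(epoch):
    """Hypothetical per-epoch learning rate under the fields above (sketch only)."""
    if epoch < config.warmup_epochs:
        # linear warmup from lr_init toward lr_max
        return config.lr_init + (config.lr_max - config.lr_init) * epoch / config.warmup_epochs
    lr = config.lr_max
    for drop_epoch in config.epoch_number_to_drop:
        if epoch >= drop_epoch:
            lr *= config.factor  # multiply by 'factor' after each listed epoch
    return lr

# Under the defaults: epoch 0 -> 0.01, epoch 10 -> 0.01 (0.1 * 0.1), epoch 20 -> 0.001.
lrs = [warmup_step_lr(e) for e in range(config.epoch_size)]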
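
The loss-scale and checkpoint fields typically plug into MindSpore's training utilities. A hedged sketch, assuming the MindSpore 1.x API (the repo's own train.py may wire these up differently):

from mindspore.train.callback import CheckpointConfig, ModelCheckpoint
from mindspore.train.loss_scale_manager import FixedLossScaleManager

from config import config

# fixed loss scaling, as config.loss_scale_num suggests
loss_scale = FixedLossScaleManager(config.loss_scale_num, drop_overflow_update=False)

# retain only the most recent checkpoints, per config.keep_checkpoint_max
ckpt_cfg = CheckpointConfig(keep_checkpoint_max=config.keep_checkpoint_max)
ckpt_cb = ModelCheckpoint(prefix=config.backbone, config=ckpt_cfg)

Both objects would then be handed to mindspore.train.Model (via loss_scale_manager=...) and Model.train (via callbacks=[...]) respectively.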