You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

cfg.h 1.8 kB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253
  1. /**
  2. * Copyright 2021 Huawei Technologies Co., Ltd
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at
  7. *
  8. * http://www.apache.org/licenses/LICENSE-2.0
  9. *
  10. * Unless required by applicable law or agreed to in writing, software
  11. * distributed under the License is distributed on an "AS IS" BASIS,
  12. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. * See the License for the specific language governing permissions and
  14. * limitations under the License.
  15. */
  16. #ifndef MINDSPORE_INCLUDE_API_CFG_H
  17. #define MINDSPORE_INCLUDE_API_CFG_H
#include <cstddef>
#include <cstdint>
#include <memory>
#include <string>
#include <vector>
  22. #include "include/api/data_type.h"
  23. #include "include/api/dual_abi_helper.h"
  24. namespace mindspore {
  25. class MixPrecisionCfg {
  26. public:
  27. MixPrecisionCfg() {
  28. this->dynamic_loss_scale_ = false;
  29. this->loss_scale_ = 128.0f;
  30. this->num_of_not_nan_iter_th_ = 1000;
  31. }
  32. bool dynamic_loss_scale_ = false; /**< Enable\disable dynamic loss scale during mix precision training */
  33. float loss_scale_; /**< Initial loss scale factor */
  34. uint32_t num_of_not_nan_iter_th_; /**< a threshold for modifying loss scale when dynamic loss scale is enabled */
  35. bool is_raw_mix_precision_ = false; /**< Is mix precision model export from mindspore */
  36. };
  37. class TrainCfg {
  38. public:
  39. TrainCfg() { this->loss_name_ = "_loss_fn"; }
  40. OptimizationLevel optimization_level_ = kO0;
  41. std::string loss_name_; /**< Set part of the name that identify a loss kernel */
  42. MixPrecisionCfg mix_precision_cfg_; /**< Mix precision configuration */
  43. bool accumulate_gradients_ = false;
  44. };
  45. } // namespace mindspore
  46. #endif // MINDSPORE_INCLUDE_API_CFG_H