parameter.py
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Parameter for cell."""
from copy import copy
from .._c_expression import ParamValue
from . import dtype as mstype
from .initializer import initializer, Initializer
from .tensor import Tensor, MetaTensor
from .._checkparam import _check_str_by_regular
from ..parallel._tensor import _get_slice_index
from ..parallel._auto_parallel_context import auto_parallel_context

__all__ = ['Parameter', 'ParameterTuple']

PARAMETER_NAME_DEFAULT = "Parameter"
PARAMETER_NAME_PREFIX_MAX_LEN = 1024


def _is_in_parallel_mode():
    """Get parallel mode."""
    return auto_parallel_context().get_parallel_mode() in ["semi_auto_parallel", "auto_parallel"]


class Parameter(MetaTensor):
  31. """
  32. Parameter types of cell models.
  33. After initialized `Parameter` is a subtype of `Tensor`.
  34. In auto_parallel mode of "semi_auto_parallel" and "auto_parallel", if init `Parameter` by
  35. a `Initializer`, the type of Parameter will be a `MetaTensor` not a `Tensor`. `MetaTensor`
  36. only save the shape type info of a tensor with no memory usage. The shape can be change while
  37. compile for auto-parallel. Call `init_data` will return a Tensor Parameter with initialized data.
  38. Note:
  39. Each parameter of Cell is represented by Parameter class.
  40. Args:
  41. default_input (Union[Tensor, Initializer]): Parameter data, when `default_input` is` Initializer`,
  42. the data stored by Parameter is `MetaTensor`, otherwise it is `Tensor`.
  43. name (str): Name of the child parameter.
  44. requires_grad (bool): True if the parameter requires gradient. Default: True.
  45. layerwise_parallel (bool): A kind of model parallel mode. When layerwise_parallel is true in paralle mode,
  46. broadcast and gradients communication would not be applied to parameters. Default: False.
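
    Examples:
        A minimal usage sketch (illustrative, not from the original file; assumes
        `numpy` is available and that `Tensor` and `Parameter` are importable from
        the `mindspore` package):

        >>> import numpy as np
        >>> from mindspore import Tensor, Parameter
        >>> weight = Parameter(Tensor(np.ones((2, 3), np.float32)), name="weight")
        >>> weight.requires_grad
        True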
  47. """
    __base_type__ = {}

    def __new__(cls, default_input, name, *args, **kwargs):
        input_class, *class_init_args = Parameter._get_parameter_new_args(default_input)
        new_type = Parameter._get_base_class(input_class)
        obj = input_class.__new__(new_type)
        input_class.__init__(obj, *class_init_args)
        # it's better to make the Initializer a kind of metatensor.
        obj.init_mode = None
        if not isinstance(obj, Tensor):
            obj.init_mode = default_input
        return obj
    def __reduce_ex__(self, _):
        if self.init_mode is not None:
            data = self.init_mode
        else:
            # cast to break deep infinite loop while deepcopy
            data = Tensor(self)
        return (
            Parameter, (data, self.name, self.requires_grad, self.layerwise_parallel))
    def __init__(self, default_input, name, requires_grad=True, layerwise_parallel=False):
        self._value = ParamValue()
        self.name = name
        self.requires_grad = requires_grad
        self.layerwise_parallel = layerwise_parallel
        # this flag is for tensor copy data.
        self.init_flag = False
        # this flag is for ge variable copy data.
        self._is_init = False
        self._inited_param = None
        self._sliced = False
        self.is_param_ps = False
        self._cast_type = None
        self.init_in_server = False
    @staticmethod
    def _get_base_class(input_class):
        input_class_name = f'Parameter{input_class.__name__}'
        if input_class_name in Parameter.__base_type__:
            new_type = Parameter.__base_type__[input_class_name]
        else:
            new_type = type(input_class_name, (Parameter, input_class), {})
            Parameter.__base_type__[input_class_name] = new_type
        return new_type
    @staticmethod
    def _get_parameter_new_args(data):
        """Get the class type and constructor arguments used to build the parameter from `data`."""
        if isinstance(data, bool):
            raise ValueError('Parameter data can not be `bool`')
        if isinstance(data, Initializer):
            if _is_in_parallel_mode():
                # do not init data while in auto parallel.
                return (MetaTensor, data.dtype, data.shape)
            data = data.to_tensor()
        if isinstance(data, Tensor):
            # make a copy of the Tensor to init the parameter
            return (Tensor, data.asnumpy())
        if isinstance(data, int):
            return (Tensor, data, mstype.int32)
        if isinstance(data, float):
            return (Tensor, data, mstype.float32)
        return (Tensor, data)
    def __str__(self):
        value_str = MetaTensor.__str__(self)
        if isinstance(self, Tensor):
            value_str = Tensor.__str__(self)
        return f'Parameter (name={self._value.name}, value={value_str})'

    def __repr__(self):
        value_str = MetaTensor.__repr__(self)
        if isinstance(self, Tensor):
            value_str = Tensor.__repr__(self)
        return f'Parameter (name={self._value.name}, value={value_str})'

    def __parameter__(self):
        """For parse check."""
    def set_param_ps(self, init_in_server=False):
        """Mark the parameter to be trained on a parameter server."""
        self.is_param_ps = True
        self.init_in_server = init_in_server

    @property
    def inited_param(self):
        """Get the new parameter after calling `init_data`."""
        return self._inited_param

    @property
    def name(self):
        """Get the name of the parameter."""
        return self._value.name
    @name.setter
    def name(self, name_):
        """
        Define a name for the parameter.

        Args:
            name_ (`str` or `None`): The name of the parameter. When the name is None or an empty string,
                the default value `PARAMETER_NAME_DEFAULT` is used.
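
        Examples:
            A small sketch of the defaulting behavior (illustrative; `p` is
            assumed to be an existing `Parameter` instance):

            >>> p.name = None
            >>> p.name
            'Parameter'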
  139. """
        if name_ is None:
            name_ = PARAMETER_NAME_DEFAULT
        elif isinstance(name_, str):
            name_ = name_.strip()
            if name_ == '':
                name_ = PARAMETER_NAME_DEFAULT
            if len(name_) > PARAMETER_NAME_PREFIX_MAX_LEN:
                raise ValueError("The length of the '{}' name should be no more than {}.".
                                 format(name_, PARAMETER_NAME_PREFIX_MAX_LEN))
        else:
            raise ValueError("The type of the name should be `str` or `None`.")
        self._value.name = name_
    @property
    def cast_type(self):
        return self._cast_type

    @cast_type.setter
    def cast_type(self, dst_type):
        if dst_type not in (mstype.float16, mstype.float32, None):
            raise ValueError("The `cast_type` should be one of [mstype.float16, mstype.float32] or `None`.")
        self._cast_type = dst_type
    @property
    def sliced(self):
        """Get slice status of the parameter."""
        return self._sliced

    @sliced.setter
    def sliced(self, sliced_):
        self._sliced = sliced_

    @property
    def is_init(self):
        """Get the initialization status of the parameter."""
        return self._is_init

    @is_init.setter
    def is_init(self, is_init_):
        """
        Set the init status of the parameter.

        Args:
            is_init_ (bool): The init status of the parameter.
        """
        self._is_init = is_init_
    def clone(self, prefix, init='same'):
        """
        Clone the parameter.

        Args:
            prefix (str): Namespace of the parameter.
            init (Union[Tensor, str, Initializer, numbers.Number]): Initialization for the data of the
                cloned parameter. If 'same', the clone keeps the current data. Default: 'same'.

        Returns:
            Parameter, a new parameter.
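
        Examples:
            A minimal sketch (illustrative, not from the original file; assumes
            `numpy` is available):

            >>> import numpy as np
            >>> p = Parameter(Tensor(np.ones((2, 2), np.float32)), name="w")
            >>> q = p.clone(prefix="copy")                # q.name == 'copy.w'
            >>> z = p.clone(prefix="zero", init='zeros')  # re-initialized clone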
  188. """
        _check_str_by_regular(prefix)
        x = copy(self)
        # pylint: disable=protected-access
        x._value = self._value.clone()
        x._value.name = prefix + '.' + self._value.name
        x.is_init = False
        if init != 'same':
            shape = self.shape
            dtype = self.dtype
            x.default_input = initializer(init, shape=shape, dtype=dtype)
        return x
    @property
    def layerwise_parallel(self):
        return self._value.layerwise_parallel

    @layerwise_parallel.setter
    def layerwise_parallel(self, value=True):
        if not isinstance(value, bool):
            raise TypeError("`layerwise_parallel` parameter must be bool type")
        self._value.layerwise_parallel = value

    @property
    def requires_grad(self):
        """Return whether the parameter requires gradient."""
        return self._value.requires_grad

    @requires_grad.setter
    def requires_grad(self, value=True):
        if not isinstance(value, bool):
            raise TypeError("`requires_grad` parameter must be bool type")
        self._value.requires_grad = value

    @property
    def data(self):
        return self.default_input

    @property
    def default_input(self):
        return self

    @default_input.setter
    def default_input(self, data):
        self.set_parameter_data(data)
    def _update_tensor_data(self, data):
        """Update the parameter by a Tensor."""
        if isinstance(self, Tensor):
            # for a Tensor of the same shape, update in place.
            self.init_flag = False
            return self.assign_value(data)
        # create a new tensor
        return Parameter(data, self.name, self.requires_grad)
    def set_parameter_data(self, data, slice_shape=False):
        """
        Set `default_input` of current `Parameter`.

        Args:
            data (Union[Tensor, Initializer, int, float]): New data.
            slice_shape (bool): If True, the shape check is skipped when slicing the Parameter. Default: False.

        Returns:
            Parameter, the parameter after setting data.
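
        Examples:
            A minimal sketch of an in-place update (illustrative, not from the
            original file; shape and dtype must match, as checked below):

            >>> import numpy as np
            >>> p = Parameter(Tensor(np.zeros((2,), np.float32)), name="p")
            >>> p = p.set_parameter_data(Tensor(np.ones((2,), np.float32)))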
  242. """
        def raise_type_error(incoming):
            raise TypeError(f"Can not change the Parameter dtype. Current dtype is {self.dtype}"
                            f", and incoming is {incoming}. Use .set_dtype(xxx) to change the dtype.")

        if not isinstance(data, (MetaTensor, Initializer, int, float)):
            raise TypeError(f"Parameter data must be [`Initializer`, `int`, `float`] or a kind of `MetaTensor` "
                            f"(like `Tensor` or `MetaTensor`). But got type {type(data)}.")
        if isinstance(data, (int, float)):
            if self.dtype in mstype.int_type and isinstance(data, float):
                raise_type_error(mstype.float_)
            data = Tensor(data, self.dtype)
        # both not init.
        is_incoming_tensor = isinstance(data, Tensor)
        is_current_tensor = isinstance(self, Tensor)

        if is_incoming_tensor and not is_current_tensor:
            raise TypeError("Parameter is a `MetaTensor` and not initialized; `data` for `set_parameter_data` "
                            "should be an `Initializer`. If you want to update it by Tensor, call method "
                            "`init_parameters_data` of `Cell` to init and replace all the Parameters of the "
                            "network, then call this method.")
        if tuple(self.shape) != tuple(data.shape):
            # when created by slicing, the shape of the Parameter is allowed to change.
            if not slice_shape:
                raise ValueError(f"Can not change the shape of a Parameter which has been initialized."
                                 f" Current shape is {self.shape}, and incoming is {data.shape}.")
        if self.dtype != data.dtype:
            raise_type_error(data.dtype)
        if isinstance(data, Initializer):
            # The parameter has been initialized, directly update by the data
            if is_current_tensor:
                self._update_tensor_data(data.to_tensor())
            else:
                # also update the related inited parameter data
                if self.inited_param is not None:
                    self.inited_param.set_parameter_data(data)
                self.init_mode = data
        elif is_incoming_tensor or is_current_tensor:
            self._update_tensor_data(data)
        else:
            raise ValueError(f"Does not support updating the Parameter by {data}")
        self.sliced = slice_shape
        return self
    def init_data(self, layout=None, set_sliced=False):
        """
        Initialize the parameter data.

        Args:
            layout (list[list[int]]): Parameter slice layout [dev_mat, tensor_map, slice_shape].

                - dev_mat (list[int]): Device matrix.
                - tensor_map (list[int]): Tensor map.
                - slice_shape (list[int]): Shape of slice.
            set_sliced (bool): True if the parameter is set sliced after initializing the data.
                Default: False.

        Returns:
            Parameter, the `Parameter` after initializing data. If the current `Parameter` was already
            initialized before, returns the same initialized `Parameter`.
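
        Examples:
            A minimal sketch (illustrative, not from the original file): a Parameter
            built from an `Initializer` under auto-parallel mode holds no data until
            `init_data` materializes it.

            >>> from mindspore.common.initializer import initializer
            >>> p = Parameter(initializer('ones', [1, 2], mstype.float32), name="w")
            >>> p = p.init_data()  # returns a Tensor-backed Parameter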
  296. """
        if self.init_mode is None:
            return self
        if layout is not None:
            if not isinstance(layout, list):
                raise TypeError("The layout should be list! layout is {}.".format(layout))
            if len(layout) < 3:
                raise ValueError("The length of layout must be no less than 3! layout is {}.".format(layout))
            slice_index = int(_get_slice_index(layout[0], layout[1]))
            if self.init_in_server and self.is_param_ps and isinstance(self.init_mode, Initializer):
                data = self.init_mode.to_tensor(0, [1])
            else:
                data = self.init_mode.to_tensor(slice_index, layout[2])
        else:
            if self.init_in_server and self.is_param_ps and isinstance(self.init_mode, Initializer):
                data = self.init_mode.to_tensor(0, [1])
            else:
                data = self.init_mode.to_tensor()

        obj = self._update_tensor_data(data)
        if id(obj) != id(self):
            self._inited_param = obj
        obj.init_mode = None
        obj.sliced = set_sliced
        return obj


class ParameterTuple(tuple):
    """
    Class for storing a tuple of parameters.

    Note:
        It is used to store the parameters of the network into the parameter tuple collection.
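
    Examples:
        A minimal sketch (illustrative, not from the original file; assumes
        `numpy` is available):

        >>> import numpy as np
        >>> p1 = Parameter(Tensor(np.ones((2,), np.float32)), name="w1")
        >>> p2 = Parameter(Tensor(np.ones((2,), np.float32)), name="w2")
        >>> params = ParameterTuple([p1, p2])
        >>> cloned = params.clone(prefix="ema")  # cloned names: 'ema.w1', 'ema.w2'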
  325. """
    def __new__(cls, iterable):
        """Create an instance object of ParameterTuple."""
        data = tuple(iterable)
        for x in data:
            if not isinstance(x, Parameter):
                raise TypeError(f"ParameterTuple input should be a `Parameter` collection. "
                                f"But got a {type(iterable)}, {iterable}")
        return tuple.__new__(ParameterTuple, tuple(data))
    def clone(self, prefix, init='same'):
        """
        Clone the parameters in the tuple.

        Args:
            prefix (str): Namespace of the parameters.
            init (str): Initialization for the data of the cloned parameters. Default: 'same'.

        Returns:
            Tuple, the new Parameter tuple.
        """
        _check_str_by_regular(prefix)
        new = []
        for x in self:
            x1 = x.clone(prefix, init)
            new.append(x1)
        return ParameterTuple(new)

    def __parameter_tuple__(self):
        """For parse check."""