You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

serialization.py 19 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """Model and parameters serialization."""
  16. import os
  17. import stat
  18. import numpy as np
  19. import mindspore.nn as nn
  20. import mindspore.context as context
  21. from mindspore import log as logger
  22. from mindspore.train.checkpoint_pb2 import Checkpoint
  23. from mindspore.common.tensor import Tensor
  24. from mindspore.common.initializer import initializer
  25. from mindspore.common.parameter import Parameter
  26. from mindspore.common.api import _executor
  27. from mindspore.common import dtype as mstype
  28. from mindspore._checkparam import check_input_data
  29. __all__ = ["save_checkpoint", "load_checkpoint", "load_param_into_net", "export"]
  30. tensor_to_ms_type = {"Int8": mstype.int8, "Int16": mstype.int16, "Int32": mstype.int32, "Int64": mstype.int64,
  31. "Float16": mstype.float16, "Float32": mstype.float32, "Float64": mstype.float64}
  32. tensor_to_np_type = {"Int8": np.int8, "Int16": np.int16, "Int32": np.int32, "Int64": np.int64,
  33. "Float16": np.float16, "Float32": np.float32, "Float64": np.float64}
  34. def _special_process_par(par, new_par):
  35. """
  36. Processes the special condition.
  37. Like (12,2048,1,1)->(12,2048), this case is caused by GE 4 dimensions tensor.
  38. """
  39. par_shape_len = len(par.data.shape())
  40. new_par_shape_len = len(new_par.data.shape())
  41. delta_len = new_par_shape_len - par_shape_len
  42. delta_i = 0
  43. for delta_i in range(delta_len):
  44. if new_par.data.shape()[par_shape_len + delta_i] != 1:
  45. break
  46. if delta_i == delta_len - 1:
  47. new_val = new_par.data.asnumpy()
  48. new_val = new_val.reshape(par.data.shape())
  49. par.set_parameter_data(Tensor(new_val, par.data.dtype()))
  50. return True
  51. return False
  52. def _update_param(param, new_param):
  53. """Updates param's data from new_param's data."""
  54. if isinstance(param.data, Tensor) and isinstance(new_param.data, Tensor):
  55. if param.data.dtype() != new_param.data.dtype():
  56. logger.error("Failed to combine the net and the parameters for param %s.", param.name)
  57. msg = ("Net parameters {} type({}) different from parameter_dict's({})"
  58. .format(param.name, param.data.dtype(), new_param.data.dtype()))
  59. raise RuntimeError(msg)
  60. if param.data.shape() != new_param.data.shape():
  61. if not _special_process_par(param, new_param):
  62. logger.error("Failed to combine the net and the parameters for param %s.", param.name)
  63. msg = ("Net parameters {} shape({}) different from parameter_dict's({})"
  64. .format(param.name, param.data.shape(), new_param.data.shape()))
  65. raise RuntimeError(msg)
  66. return
  67. param.set_parameter_data(new_param.data)
  68. return
  69. if isinstance(param.data, Tensor) and not isinstance(new_param.data, Tensor):
  70. if param.data.shape() != (1,) and param.data.shape() != ():
  71. logger.error("Failed to combine the net and the parameters for param %s.", param.name)
  72. msg = ("Net parameters {} shape({}) is not (1,), inconsitent with parameter_dict's(scalar)."
  73. .format(param.name, param.data.shape()))
  74. raise RuntimeError(msg)
  75. param.set_parameter_data(initializer(new_param.data, param.data.shape(), param.data.dtype()))
  76. elif isinstance(new_param.data, Tensor) and not isinstance(param.data, Tensor):
  77. logger.error("Failed to combine the net and the parameters for param %s.", param.name)
  78. msg = ("Net parameters {} type({}) different from parameter_dict's({})"
  79. .format(param.name, type(param.data), type(new_param.data)))
  80. raise RuntimeError(msg)
  81. else:
  82. param.set_parameter_data(type(param.data)(new_param.data))
  83. def save_checkpoint(parameter_list, ckpoint_file_name):
  84. """
  85. Saves checkpoint info to a specified file.
  86. Args:
  87. parameter_list (list): Parameters list, each element is a dict
  88. like {"name":xx, "type":xx, "shape":xx, "data":xx}.
  89. ckpoint_file_name (str): Checkpoint file name.
  90. Raises:
  91. RuntimeError: Failed to save the Checkpoint file.
  92. """
  93. logger.info("Execute save checkpoint process.")
  94. checkpoint_list = Checkpoint()
  95. try:
  96. for param in parameter_list:
  97. param_value = checkpoint_list.value.add()
  98. param_value.tag = param["name"]
  99. param_tensor = param_value.tensor
  100. param_data = param["data"].asnumpy().reshape(-1)
  101. param_tensor.tensor_content = param_data.tostring()
  102. param_tensor.tensor_type = str(param["data"].dtype())
  103. if param['data'].shape() == ():
  104. param_tensor.dims.append(0)
  105. else:
  106. for dim in param['data'].shape():
  107. param_tensor.dims.append(dim)
  108. with open(ckpoint_file_name, "wb") as f:
  109. f.write(checkpoint_list.SerializeToString())
  110. os.chmod(ckpoint_file_name, stat.S_IRUSR)
  111. except BaseException as e:
  112. logger.error("Failed to save the checkpoint file %s.", ckpoint_file_name)
  113. raise RuntimeError(e.__str__())
  114. logger.info("Save checkpoint process finish.")
  115. def load_checkpoint(ckpoint_file_name, net=None):
  116. """
  117. Loads checkpoint info from a specified file.
  118. Args:
  119. ckpoint_file_name (str): Checkpoint file name.
  120. net (Cell): Cell network. Default: None
  121. Returns:
  122. Dict, key is parameter name, value is a Parameter.
  123. Raises:
  124. ValueError: Checkpoint file is incorrect.
  125. """
  126. if not isinstance(ckpoint_file_name, str):
  127. raise ValueError("The ckpoint_file_name must be String.")
  128. if not os.path.exists(ckpoint_file_name) or ckpoint_file_name[-5:] != ".ckpt":
  129. raise ValueError("Please input the correct checkpoint file name.")
  130. if os.path.getsize(ckpoint_file_name) == 0:
  131. raise ValueError("The checkpoint file may be empty, please make sure enter the correct file name.")
  132. logger.info("Execute load checkpoint process.")
  133. checkpoint_list = Checkpoint()
  134. try:
  135. with open(ckpoint_file_name, "rb") as f:
  136. pb_content = f.read()
  137. checkpoint_list.ParseFromString(pb_content)
  138. except BaseException as e:
  139. logger.error("Failed to read the checkpoint file %s, please check the correct of the file.", ckpoint_file_name)
  140. raise ValueError(e.__str__())
  141. parameter_dict = {}
  142. try:
  143. for element in checkpoint_list.value:
  144. data = element.tensor.tensor_content
  145. data_type = element.tensor.tensor_type
  146. np_type = tensor_to_np_type[data_type]
  147. ms_type = tensor_to_ms_type[data_type]
  148. param_data = np.fromstring(data, np_type)
  149. dims = element.tensor.dims
  150. if dims == [0]:
  151. if 'Float' in data_type:
  152. param_data = float(param_data[0])
  153. elif 'Int' in data_type:
  154. param_data = int(param_data[0])
  155. parameter_dict[element.tag] = Parameter(Tensor(param_data, ms_type), name=element.tag)
  156. elif dims == [1]:
  157. parameter_dict[element.tag] = Parameter(Tensor(param_data, ms_type), name=element.tag)
  158. else:
  159. param_dim = []
  160. for dim in dims:
  161. param_dim.append(dim)
  162. param_value = param_data.reshape(param_dim)
  163. parameter_dict[element.tag] = Parameter(Tensor(param_value, ms_type), name=element.tag)
  164. logger.info("Load checkpoint process finish.")
  165. except BaseException as e:
  166. logger.error("Failed to load the checkpoint file %s.", ckpoint_file_name)
  167. raise RuntimeError(e.__str__())
  168. if net:
  169. load_param_into_net(net, parameter_dict)
  170. return parameter_dict
  171. def load_param_into_net(net, parameter_dict):
  172. """
  173. Loads parameters into network.
  174. Args:
  175. net (Cell): Cell network.
  176. parameter_dict (dict): Parameter dict.
  177. Raises:
  178. TypeError: Argument is not a Cell, or parameter_dict is not a Parameter dict.
  179. """
  180. if not isinstance(net, nn.Cell):
  181. logger.error("Failed to combine the net and the parameters.")
  182. msg = ("Argument net should be a Cell, but got {}.".format(type(net)))
  183. raise TypeError(msg)
  184. if not isinstance(parameter_dict, dict):
  185. logger.error("Failed to combine the net and the parameters.")
  186. msg = ("Argument parameter_dict should be a dict, but got {}.".format(type(parameter_dict)))
  187. raise TypeError(msg)
  188. logger.info("Execute load parameter into net process.")
  189. for name in parameter_dict:
  190. for _, param in net.parameters_and_names():
  191. if name == param.name and param.layerwise_parallel:
  192. # layerwise parallel parameter data loaded from checkpoint file,
  193. # was a complete(merged) data, need to be splited
  194. new_param = parameter_dict[param.name]
  195. _load_tensor_for_layerwise(new_param, param)
  196. break
  197. param_not_load = []
  198. for _, param in net.parameters_and_names():
  199. if param.name in parameter_dict:
  200. new_param = parameter_dict[param.name]
  201. if not isinstance(new_param, Parameter):
  202. logger.error("Failed to combine the net and the parameters.")
  203. msg = ("Argument parameter_dict element should be a Parameter, but got {}.".format(type(new_param)))
  204. raise TypeError(msg)
  205. _update_param(param, new_param)
  206. else:
  207. param_not_load.append(param.name)
  208. if param_not_load:
  209. _load_dismatch_prefix_params(net, parameter_dict, param_not_load)
  210. logger.debug("Params not matched(in net but not in parameter_dict):")
  211. for param_name in param_not_load:
  212. logger.debug("%s", param_name)
  213. logger.info("Load parameter into net finish, {} parameters has not been loaded.".format(len(param_not_load)))
  214. def _load_dismatch_prefix_params(net, parameter_dict, param_not_load):
  215. """When some net parameter did not load, try to continue load."""
  216. prefix_name = ""
  217. longest_name = param_not_load[0]
  218. while prefix_name != longest_name and param_not_load:
  219. logger.debug("Count: {} parameters has not been loaded, try to load continue.".format(len(param_not_load)))
  220. longest_name = sorted(param_not_load, key=len, reverse=True)[0]
  221. prefix_name = longest_name
  222. for net_param_name in param_not_load:
  223. for dict_name in parameter_dict:
  224. if dict_name.endswith(net_param_name):
  225. tmp_name = dict_name[:-len(net_param_name)]
  226. prefix_name = prefix_name if len(prefix_name) < len(tmp_name) else tmp_name
  227. if prefix_name != longest_name:
  228. logger.info("Remove parameter prefix name: {}, continue to load.".format(prefix_name))
  229. for _, param in net.parameters_and_names():
  230. new_param_name = prefix_name + param.name
  231. if param.name in param_not_load and new_param_name in parameter_dict:
  232. new_param = parameter_dict[new_param_name]
  233. _update_param(param, new_param)
  234. param_not_load.remove(param.name)
  235. def _save_graph(network, file_name):
  236. """
  237. Saves the graph of network to a file.
  238. Args:
  239. network (Cell): Obtain a pipeline through network for saving graph.
  240. file_name (str): Graph file name into which the graph will be saved.
  241. """
  242. logger.info("Execute save the graph process.")
  243. graph_proto = network.get_func_graph_proto()
  244. if graph_proto:
  245. with open(file_name, "wb") as f:
  246. f.write(graph_proto)
  247. os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
  248. def _exec_save_checkpoint(train_network, ckpoint_file_name, integrated_save=True):
  249. """
  250. Saves checkpoint for 'ms' backend.
  251. Args:
  252. train_network (Network): The train network for training.
  253. ckpoint_file_name (str): The name of checkpoint file.
  254. integrated_save (bool): Whether to intergrated save in automatic model parallel scene.
  255. """
  256. param_dict = {}
  257. for _, param in train_network.parameters_and_names():
  258. param_dict[param.name] = param
  259. param_list = []
  260. for (key, value) in param_dict.items():
  261. each_param = {"name": key}
  262. if isinstance(value.data, Tensor):
  263. param_data = value.data
  264. else:
  265. param_data = Tensor(value.data)
  266. # in automatic model parallel scenario, some parameters were spliteds to all the devices,
  267. # which should be combined before saving
  268. if integrated_save and key in train_network.parameter_layout_dict:
  269. param_data = _get_merged_param_data(train_network, key, param_data)
  270. each_param["data"] = param_data
  271. param_list.append(each_param)
  272. save_checkpoint(param_list, ckpoint_file_name)
  273. def _get_merged_param_data(net, param_name, param_data):
  274. """
  275. Gets the merged data(tensor) from tensor slice, by device arrangement and tensor map.
  276. Args:
  277. net (Cell): MindSpore network.
  278. param_name(str): The parameter name, which to be combined.
  279. param_data(Tensor):The parameter data on the local device,
  280. It was a slice of the whole parameter data.
  281. Returns:
  282. Tensor, the combined tensor which with the whole data value.
  283. """
  284. layout = []
  285. layout = net.parameter_layout_dict[param_name]
  286. if len(layout) < 2:
  287. logger.info("layout dict does not contain the key %s", param_name)
  288. return param_data
  289. dev_mat = layout[0]
  290. tensor_map = layout[1]
  291. from mindspore.parallel._cell_wrapper import get_allgather_cell
  292. from mindspore.parallel._tensor import _reshape_param_data
  293. # while any dim is not equal to -1, means param is splited and needs to be merged
  294. for dim in tensor_map:
  295. if dim != -1:
  296. allgather_net = get_allgather_cell()
  297. param_data = allgather_net(param_data)
  298. return _reshape_param_data(param_data, dev_mat, tensor_map)
  299. return param_data
  300. def _load_tensor_for_layerwise(new_param, old_param):
  301. """
  302. Replaces parameters with sliced tensors by layerwise parallel strategies.
  303. Args:
  304. new_param (Parameter): The new layerwise parallel parameter, will be loaded into net.
  305. old_param(Parameter): The current parameter in the net.
  306. """
  307. if not isinstance(new_param.data, Tensor) or not isinstance(old_param.data, Tensor):
  308. logger.error("Failed to combine the net and the parameters.")
  309. msg = ("layerwise parallel parameter should be a Tensor, but got {}.".format(type(new_param.data)))
  310. raise TypeError(msg)
  311. if old_param.data.shape() == new_param.data.shape():
  312. return
  313. from mindspore.parallel._tensor import _load_tensor
  314. from mindspore.communication.management import get_group_size
  315. dev_mat = [get_group_size()]
  316. shape = new_param.data.shape()
  317. for x in range(len(shape)): # dim 0 set 0, others set -1
  318. if x:
  319. tensor_map.append(-1)
  320. new_tensor = _load_tensor(new_param.data, dev_mat, tensor_map)
  321. new_param.set_parameter_data(new_tensor)
  322. def _fill_param_into_net(net, parameter_list):
  323. """
  324. Fills parameter_list into net.
  325. Args:
  326. net (Cell): train network.
  327. parameter_list (list): parameters list from ge callback.
  328. """
  329. parameter_dict = {}
  330. for each_param in parameter_list:
  331. param_name = each_param["name"]
  332. np_val = each_param["data"].asnumpy()
  333. if np_val.shape == (1,):
  334. parameter_dict[param_name] = Parameter(np_val, name=param_name)
  335. elif np_val.shape == ():
  336. parameter_dict[param_name] = Parameter(Tensor(np_val.tolist(), mstype.pytype_to_dtype(np_val.dtype)),
  337. name=param_name)
  338. else:
  339. parameter_dict[param_name] = Parameter(Tensor(np_val), name=param_name)
  340. load_param_into_net(net, parameter_dict)
  341. def export(net, *inputs, file_name, file_format='GEIR'):
  342. """
  343. Exports MindSpore predict model to file in specified format.
  344. Args:
  345. net (Cell): MindSpore network.
  346. inputs (Tensor): Inputs of the `net`.
  347. file_name (str): File name of model to export.
  348. file_format (str): MindSpore currently supports 'GEIR', 'ONNX' and 'LITE' format for exported model.
  349. - GEIR: Graph Engine Intermidiate Representation. An intermidiate representation format of
  350. Ascend model.
  351. - ONNX: Open Neural Network eXchange. An open format built to represent machine learning models.
  352. - LITE: Huawei model format for mobile. A lite model only for the MindSpore Lite
  353. """
  354. logger.info("exporting model file:%s format:%s.", file_name, file_format)
  355. check_input_data(*inputs, data_class=Tensor)
  356. supported_formats = ['GEIR', 'ONNX', 'LITE']
  357. if file_format not in supported_formats:
  358. raise ValueError(f'Illegal file format {file_format}, it must be one of {supported_formats}')
  359. # switch network mode to infer when it is training
  360. is_training = net.training
  361. if is_training:
  362. net.set_train(mode=False)
  363. # export model
  364. if file_format == 'GEIR':
  365. _executor.compile(net, *inputs, phase='export')
  366. _executor.export(net, file_name, file_format)
  367. elif file_format == 'ONNX': # file_format is 'ONNX'
  368. phase_name = 'export_onnx'
  369. graph_id, _ = _executor.compile(net, *inputs, phase=phase_name, do_convert=False)
  370. onnx_stream = _executor._get_func_graph_proto(graph_id)
  371. with open(file_name, 'wb') as f:
  372. os.chmod(file_name, stat.S_IWUSR | stat.S_IRUSR)
  373. f.write(onnx_stream)
  374. elif file_format == 'LITE': # file_format is 'LITE'
  375. context.set_context(save_ms_model=True, save_ms_model_path=file_name)
  376. net(*inputs)
  377. # restore network training mode
  378. if is_training:
  379. net.set_train(mode=True)