You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

cell.py 25 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667
  1. # Copyright 2020 Huawei Technologies Co., Ltd
  2. #
  3. # Licensed under the Apache License, Version 2.0 (the "License");
  4. # you may not use this file except in compliance with the License.
  5. # You may obtain a copy of the License at
  6. #
  7. # http://www.apache.org/licenses/LICENSE-2.0
  8. #
  9. # Unless required by applicable law or agreed to in writing, software
  10. # distributed under the License is distributed on an "AS IS" BASIS,
  11. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. # See the License for the specific language governing permissions and
  13. # limitations under the License.
  14. # ============================================================================
  15. """cell"""
  16. import time
  17. import gc
  18. from collections import OrderedDict
  19. from mindspore import log as logger
  20. from .. import context
  21. from ..common import dtype as mstype
  22. from ..common.api import _executor
  23. from .._checkparam import _check_str_by_regular
  24. from ..common.parameter import Parameter, ParameterTuple
  25. from .._c_expression import init_backend
  26. from ..ops.primitive import Primitive
  27. from ..parallel._tensor import _load_tensor_by_layout
  28. from ..parallel._utils import _get_parallel_mode
  29. from ..common.tensor import Tensor
class Cell:
    """
    Base class for all neural networks.

    A `Cell` could be a single neural network cell, such as conv2d, relu, batch_norm, etc.,
    or a composition of cells used to construct a network.

    Note:
        In general, the autograd algorithm will automatically generate the implementation of
        the gradient function, but if the bprop method is implemented, the gradient function
        will be replaced by the bprop. The bprop implementation will receive a Tensor `dout`
        containing the gradient of the loss w.r.t. the output, and a Tensor `out` containing
        the forward result. The bprop needs to compute the gradient of the loss w.r.t. the
        inputs; gradient of the loss w.r.t. Parameter variables is not supported currently.

    Args:
        auto_prefix (bool): Recursively generate namespaces. Default: True.
        flags (dict): Initial boolean flags forwarded to `add_flags`. Default: None.

    Examples:
        >>> class MyCell(Cell):
        >>>     def __init__(self):
        >>>         super(MyCell, self).__init__()
        >>>         self.relu = P.ReLU()
        >>>
        >>>     def construct(self, x):
        >>>         return self.relu(x)
    """

    def __init__(self, auto_prefix=True, flags=None):
        # Registries for parameters and sub-cells; populated via __setattr__.
        self._params = OrderedDict()
        self._cells = OrderedDict()
        self.training = False
        self.pynative = False
        self._auto_prefix = auto_prefix
        self._scope = None
        self._phase = 'train'
        self._parameter_layout_dict = {}
        # Nanosecond creation timestamp, used as a unique id for compiled-graph resources
        # (see __del__ / get_func_graph_proto).
        self._create_time = int(time.time() * 1e9)
        init_backend()
        # call gc to release GE session resources used by non-used cell objects
        gc.collect()
        self._construct_inputs_num = 0
        self._construct_inputs_names = []
        # Only (semi-)auto parallel needs construct's argument names, to match
        # inputs against parameter_layout_dict when slicing.
        if _get_parallel_mode() in ["auto_parallel", "semi_auto_parallel"]:
            self._get_construct_inputs_number_and_name()
        self._parallel_inputs_run = None
        if flags:
            self.add_flags(**flags)
    @property
    def create_time(self):
        """int: nanosecond timestamp taken in __init__, identifying this cell's graph resources."""
        return self._create_time
    @property
    def cell_init_args(self):
        """str: the recorded init-arguments string; AttributeError if the setter was never called."""
        return self._cell_init_args
  79. @cell_init_args.setter
  80. def cell_init_args(self, value):
  81. if not isinstance(value, str):
  82. raise TypeError("'cell_init_args' must be string type.")
  83. self._cell_init_args = value
    @property
    def phase(self):
        """str: the current execution phase (e.g. 'train' or 'predict', see set_train)."""
        return self._phase
  87. @phase.setter
  88. def phase(self, value):
  89. if not isinstance(value, str):
  90. raise TypeError("'phase' must be string type.")
  91. self._phase = value
    @property
    def parameter_layout_dict(self):
        """dict: name -> layout, consumed by load_parameter_slice/_load_inputs for parallel slicing."""
        return self._parameter_layout_dict
    @property
    def cls_name(self):
        """str: the name of this cell's concrete class."""
        return self.__class__.__name__
  98. @parameter_layout_dict.setter
  99. def parameter_layout_dict(self, value):
  100. if not isinstance(value, dict):
  101. raise TypeError("'parameter_layout_dict' must be dict type.")
  102. self._parameter_layout_dict = value
  103. def get_func_graph_proto(self):
  104. """Return graph binary proto."""
  105. return _executor._get_func_graph_proto(self.phase + "." + str(self.create_time), "anf_ir", True)
  106. def __getattr__(self, name):
  107. if '_params' in self.__dict__:
  108. params = self.__dict__['_params']
  109. if name in params:
  110. return params[name]
  111. if '_cells' in self.__dict__:
  112. cells = self.__dict__['_cells']
  113. if name in cells:
  114. return cells[name]
  115. raise AttributeError("'{}' object has no attribute '{}'.".format(type(self).__name__, name))
    def __del__(self):
        # Release backend graph resources keyed by this cell's creation timestamp.
        # hasattr guard: __init__ may not have completed if construction failed.
        if hasattr(self, "_create_time"):
            _executor.del_net_res(str(self._create_time))
  119. def __delattr__(self, name):
  120. if name in self._params:
  121. del self._params[name]
  122. elif name in self._cells:
  123. del self._cells[name]
  124. else:
  125. object.__delattr__(self, name)
    def __call__(self, *inputs):
        # Graph mode: compile (if needed) and execute through the backend executor.
        if context.get_context("mode") == context.GRAPH_MODE:
            out = self.compile_and_run(*inputs)
            return out
        # PyNative mode: execute the Python `construct` directly.
        output = self.construct(*inputs)
        if isinstance(output, Parameter):
            # Unwrap a returned Parameter to its underlying data tensor.
            output = output.data
        return output
    def __setattr__(self, name, value):
        """Route attribute assignment into the param/cell registries based on the value's type."""
        cells = self.__dict__.get('_cells')
        params = self.__dict__.get('_params')
        if isinstance(value, Parameter):
            if params is None:
                raise AttributeError("Can not assign params before Cell.__init__() call.")
            if name in self.__dict__:
                if self.__dict__[name] is not None:
                    raise TypeError("Expected type is not in (Parameter, Cell), but got Parameter.")
                # A plain attribute previously cleared to None may be replaced by a Parameter.
                del self.__dict__[name]
            if cells and name in cells:
                raise TypeError("Expected type is Cell, but got Parameter.")
            self.insert_param_to_cell(name, value)
        elif isinstance(value, ParameterTuple):
            if params is None:
                raise AttributeError("Can not assign params before Cell.__init__() call.")
            # Register each member under its own name; the tuple itself stays a plain attribute.
            for item in value:
                self.insert_param_to_cell(item.name, item, check_name=False)
            object.__setattr__(self, name, value)
        elif isinstance(value, Cell):
            if cells is None:
                raise AttributeError("Can not assign cells before Cell.__init__() call.")
            if name in self.__dict__:
                del self.__dict__[name]
            if params and name in params:
                raise TypeError("Expected type is Parameter, but got Cell.")
            if self._auto_prefix:
                # Prefix the sub-cell's parameter names with this attribute name.
                value.update_parameters_name(name + '.')
            cells[name] = value
        elif params and name in params:
            # Re-assigning an existing parameter name: only clearing it to None is allowed here.
            if value is not None:
                raise TypeError("Expected type in (Parameter, ParameterTuple), but got {}.".format(type(value)))
            self.insert_param_to_cell(name, None)
        elif cells and name in cells:
            # Re-assigning an existing sub-cell name: only clearing it to None is allowed here.
            if value is not None:
                raise TypeError("Expected type is cell, but got {}.".format(type(value)))
            self._cells[name] = None
        else:
            if isinstance(value, Primitive):
                value.set_prim_instance_name(name)
            object.__setattr__(self, name, value)
    def extend_repr(self):
        """
        Sets the extended representation of the Cell.

        To print customized extended information, re-implement this method in your own cells.
        """
        return ''
  181. def __repr__(self):
  182. extra_str = self.extend_repr()
  183. info_str = self.__class__.__name__ + '<'
  184. if self._cells:
  185. sub_str = '\n'
  186. if extra_str:
  187. sub_str += '{}\n'.format(self.extend_repr())
  188. for key, value in self._cells.items():
  189. sub_str += '({}): {}\n'.format(key, repr(value))
  190. sub_str = sub_str.replace('\n', '\n ') + '>'
  191. info_str += sub_str
  192. else:
  193. info_str += extra_str + '>'
  194. return info_str
  195. def load_parameter_slice(self, params):
  196. """
  197. Replace parameters with sliced tensors by parallel strategies.
  198. Please refer to the usage in source code of `mindspore.common._Executor.compile`.
  199. Args:
  200. params (dict): The parameters dictionary used for init data graph.
  201. """
  202. if params is None:
  203. for key in self.parameters_dict():
  204. tensor = self.parameters_dict()[key].data
  205. if key not in self.parameter_layout_dict:
  206. logger.info("layout dict does not contain the key %s", key)
  207. continue
  208. layout = self.parameter_layout_dict[key]
  209. new_tensor = _load_tensor_by_layout(tensor, layout)
  210. self.parameters_dict()[key].set_parameter_data(new_tensor)
  211. elif isinstance(params, OrderedDict):
  212. for key in params:
  213. tensor = params[key].data
  214. if key not in self.parameter_layout_dict:
  215. logger.info("layout dict does not contain the key %s", key)
  216. continue
  217. layout = self.parameter_layout_dict[key]
  218. new_tensor = _load_tensor_by_layout(tensor, layout)
  219. params[key].set_parameter_data(new_tensor)
  220. else:
  221. raise TypeError('Parameters need OrderedDict type, but got {}'.
  222. format(type(params)))
    def _load_inputs(self, *inputs):
        """
        Slice inputs tensors by parallel strategies.

        Args:
            inputs (Function or Cell): inputs of construct method.

        Returns:
            tuple, the inputs with laid-out entries replaced by sliced tensors.

        Raises:
            ValueError: If more inputs are given than `construct` accepts.
        """
        parallel_inputs_run = []
        # _construct_inputs_num / _construct_inputs_names are filled by
        # _get_construct_inputs_number_and_name() under (semi-)auto parallel mode.
        if len(inputs) > self._construct_inputs_num:
            raise ValueError('Len of inputs: {} is bigger than self._construct_inputs_num: {}.'.
                             format(len(inputs), self._construct_inputs_num))
        for i, tensor in enumerate(inputs):
            key = self._construct_inputs_names[i]
            # if input is not used, self.parameter_layout_dict may not contain the key
            if key not in self.parameter_layout_dict:
                logger.warning("layout dict does not contain the key %s", key)
                parallel_inputs_run.append(tensor)
            else:
                layout = self.parameter_layout_dict[key]
                new_tensor = _load_tensor_by_layout(tensor, layout)
                parallel_inputs_run.append(new_tensor)
        return tuple(parallel_inputs_run)
  244. def _get_construct_inputs_number_and_name(self):
  245. """Compute self._construct_inputs_names and self._construct_inputs_num"""
  246. import inspect
  247. from mindspore._extends.parse.parser import get_parse_method_of_class
  248. fn = get_parse_method_of_class(self)
  249. inspect.getfullargspec(fn)
  250. self._construct_inputs_num = fn.__code__.co_argcount
  251. self._construct_inputs_names = fn.__code__.co_varnames
  252. assert self._construct_inputs_num > 0
  253. assert self._construct_inputs_names[0] == 'self'
  254. assert self._construct_inputs_num - 1 <= len(self._construct_inputs_names)
  255. self._construct_inputs_names = self._construct_inputs_names[1:self._construct_inputs_num]
  256. self._construct_inputs_num = self._construct_inputs_num - 1
    def compile_and_run(self, *inputs):
        """
        Compiles and runs cell.

        Args:
            inputs (tuple): Input parameters.

        Returns:
            Object, the result of executing.
        """
        # compile_flag reports whether a (re)compile actually happened.
        _, compile_flag = _executor.compile(self, *inputs, phase=self.phase)
        if _get_parallel_mode() in ["auto_parallel", "semi_auto_parallel"]:
            # Reuse the previously sliced inputs when the first input is a virtual-flagged
            # Tensor and no recompile occurred; otherwise slice the inputs by layout.
            if inputs and isinstance(inputs[0], Tensor) and inputs[0].virtual_flag and (not compile_flag):
                parallel_inputs_run = self._parallel_inputs_run
            else:
                self._parallel_inputs_run = self._load_inputs(*inputs)
                parallel_inputs_run = self._parallel_inputs_run
            return _executor(self, *parallel_inputs_run, phase=self.phase)
        return _executor(self, *inputs, phase=self.phase)
    def exec_checkpoint_graph(self):
        """Executes saving checkpoint graph operation."""
        # Runs the executor with the dedicated 'save' phase.
        _executor(self, phase='save')
  277. def insert_param_to_cell(self, param_name, param, check_name=True):
  278. """
  279. Adds a parameter to the current cell.
  280. Inserts a parameter with given name to the cell. Please refer to the usage in
  281. source code of `mindspore.nn.Cell.__setattr__`.
  282. Args:
  283. param_name (str): Name of the parameter.
  284. param (Parameter): Parameter to be inserted to the cell.
  285. check_name (bool): Determines whether the name input is compatible. Default: True.
  286. Raises:
  287. KeyError: If the name of parameter is null or contains dot.
  288. AttributeError: If user did not call init() first.
  289. TypeError: If the type of parameter is not Parameter.
  290. """
  291. if not param_name:
  292. raise KeyError("The name of parameter should not be null.")
  293. if check_name and '.' in param_name:
  294. raise KeyError("The name of parameter should not contain \".\"")
  295. if '_params' not in self.__dict__:
  296. raise AttributeError("You need call init() first.")
  297. if hasattr(self, param_name) and param_name not in self._params:
  298. raise KeyError("Duplicated parameter name '{}'.".format(param_name))
  299. if not isinstance(param, Parameter) and param is not None:
  300. raise TypeError("The type of parameter should be 'Parameter' if not None.")
  301. self._params[param_name] = param
  302. def insert_child_to_cell(self, child_name, child):
  303. """
  304. Adds a child cell to the current cell.
  305. Inserts a subcell with given name to current cell.
  306. Args:
  307. child_name (str): Name of the child cell.
  308. child (Cell): The child cell to be inserted.
  309. Raises:
  310. KeyError: Child Cell's name is incorrect or duplicated with the other child name.
  311. TypeError: Child Cell's type is incorrect.
  312. """
  313. if not child_name or '.' in child_name:
  314. raise KeyError("Child cell name is incorrect.")
  315. if hasattr(self, child_name) and child_name not in self._cells:
  316. raise KeyError("Duplicate child name '{}'.".format(child_name))
  317. if not isinstance(child, Cell) and child is not None:
  318. raise TypeError("Child cell type is incorrect.")
  319. self._cells[child_name] = child
    def construct(self, *inputs):
        """
        Defines the computation to be performed.

        This method should be overridden by all subclasses.

        Note:
            The inputs of the top cell only allow Tensor.
            Other types (tuple, list, int etc.) are forbidden.

        Returns:
            Tensor, returns the computed result.

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError
  331. def parameters_dict(self, recurse=True):
  332. """
  333. Gets parameters dictionary.
  334. Gets the parameters dictionary of this cell.
  335. Args:
  336. recurse (bool): Whether contains the parameters of subcells. Default: True.
  337. Returns:
  338. OrderedDict, return parameters dictionary.
  339. """
  340. param_dict = OrderedDict()
  341. for param in self.get_parameters(expand=recurse):
  342. param_dict[param.name] = param
  343. return param_dict
  344. def parameters_broadcast_dict(self, recurse=True):
  345. param_dict = OrderedDict()
  346. for param in self.get_parameters(expand=recurse):
  347. if param.layerwise_parallel is False:
  348. param_dict[param.name] = param
  349. if not param_dict:
  350. return None
  351. return param_dict
    def update_parameters_name(self, prefix='', recurse=True):
        """
        Updates the names of parameters with given prefix string.

        Adds the given prefix to the names of parameters.

        Args:
            prefix (str): The prefix string.
            recurse (bool): Whether contains the parameters of subcells. Default: True.
        """
        _check_str_by_regular(prefix)
        for name, param in self.parameters_and_names(expand=recurse):
            if prefix != '':
                # A renamed parameter is marked uninitialized again.
                param.is_init = False
            param.name = prefix + name
  365. def trainable_params(self, recurse=True):
  366. """
  367. Returns all trainable parameters.
  368. Returns a list of all trainable parmeters.
  369. Args:
  370. recurse (bool): Whether contains the trainable parameters of subcells. Default: True.
  371. Returns:
  372. List, the list of trainable parameters.
  373. """
  374. return list(filter(lambda x: x.requires_grad, self.get_parameters(expand=recurse)))
  375. def untrainable_params(self, recurse=True):
  376. """
  377. Returns all untrainable parameters.
  378. Returns a list of all untrainable parmeters.
  379. Args:
  380. recurse (bool): Whether contains the untrainable parameters of subcells. Default: True.
  381. Returns:
  382. List, the list of untrainable parameters.
  383. """
  384. return list(filter(lambda x: not x.requires_grad, self.get_parameters(expand=recurse)))
  385. def get_parameters(self, expand=True):
  386. """
  387. Returns an iterator over cell parameters.
  388. Yields parameters of this cell. If `expand` is True, yield parameters of this cell and all subcells.
  389. Args:
  390. expand (bool): If True, yields parameters of this cell and all subcells. Otherwise, yields only parameters
  391. that are direct members of this cell. Default: True.
  392. Examples:
  393. >>> net = Net()
  394. >>> for item in net.get_parameters():
  395. >>> print(item)
  396. """
  397. for _, param in self.parameters_and_names(expand=expand):
  398. yield param
  399. def check_names(self):
  400. names = set("")
  401. for value, param in self.parameters_and_names():
  402. if param.name in names:
  403. raise ValueError("The value of {} is {}, its name '{}' already exists.".
  404. format(value, param, param.name))
  405. names.add(param.name)
  406. def parameters_and_names(self, name_prefix='', expand=True):
  407. """
  408. Returns an iterator over cell parameters.
  409. Includes the parameter's name and itself.
  410. Args:
  411. name_prefix (str): Namespace. Default: ''.
  412. expand (bool): If True, yields parameters of this cell and all subcells. Otherwise, yields only parameters
  413. that are direct members of this cell. Default: True.
  414. Examples:
  415. >>> n = Net()
  416. >>> names = []
  417. >>> for m in n.parameters_and_names():
  418. >>> if m[0]:
  419. >>> names.append(m[0])
  420. """
  421. cells = []
  422. if expand:
  423. cells = self.cells_and_names(name_prefix=name_prefix)
  424. else:
  425. cells.append((name_prefix, self))
  426. params_set = set()
  427. for cell_name, cell in cells:
  428. params = cell._params.items()
  429. for par_name, par in params:
  430. if par and par not in params_set:
  431. params_set.add(par)
  432. par_new_name = par_name
  433. if cell_name:
  434. par_new_name = cell_name + '.' + par_new_name
  435. yield par_new_name, par
  436. def cells_and_names(self, cells=None, name_prefix=''):
  437. """
  438. Returns an iterator over all cells in the network.
  439. Includes the cell's name and itself.
  440. Args:
  441. cells (str): Cells to iterate over. Default: None.
  442. name_prefix (str): Namespace. Default: ''.
  443. Examples:
  444. >>> n = Net()
  445. >>> names = []
  446. >>> for m in n.cells_and_names():
  447. >>> if m[0]:
  448. >>> names.append(m[0])
  449. """
  450. t_cells = cells if cells else set()
  451. if self in t_cells:
  452. return
  453. t_cells.add(self)
  454. yield name_prefix, self
  455. for name, cell in self._cells.items():
  456. if cell:
  457. cells_name_prefix = name
  458. if name_prefix:
  459. cells_name_prefix = name_prefix + '.' + cells_name_prefix
  460. for ele in cell.cells_and_names(t_cells, cells_name_prefix):
  461. yield ele
    def cells(self):
        """Returns an iterator over immediate cells."""
        return self.name_cells().values()
    def _set_scope(self, name):
        """Sets the name on the first time."""
        # Only assigns when no scope was set yet; later calls are no-ops.
        if self._scope is None:
            self._scope = name
    def _children_scope_recursive(self, parent_prefix='Default'):
        """Generates the scope of each layer of the network recursively."""
        # When enabled, scopes look like "parent/name-ClassName"; otherwise "parent/name".
        reserve_class_name_in_scope = context.get_context("reserve_class_name_in_scope")
        # First yield the immediate children, then recurse into each child.
        for name, cell in self.name_cells().items():
            yield parent_prefix + "/" + name + (("-" + cell.__class__.__name__)
                                                if reserve_class_name_in_scope else ""), cell
        for name, cell in self.name_cells().items():
            for key, value in cell._children_scope_recursive(parent_prefix + "/" + name +
                                                             (("-" + cell.__class__.__name__)
                                                              if reserve_class_name_in_scope else "")):
                yield key, value
    def get_scope(self):
        """Returns the scope of a cell object in one network."""
        return self._scope
    def generate_scope(self):
        """Generate the scope for every cell object in the network."""
        # _set_scope only assigns on the first call, so existing scopes are preserved.
        for name, cell in self._children_scope_recursive():
            cell._set_scope(name)
  487. def name_cells(self):
  488. """
  489. Returns an iterator over all cells in the network.
  490. Include name of the cell and cell itself.
  491. """
  492. value_set = set()
  493. cells = OrderedDict()
  494. for name, cell in self._cells.items():
  495. if cell is not None and cell not in value_set:
  496. value_set.add(cell)
  497. cells[name] = cell
  498. return cells
  499. def add_flags(self, **flags):
  500. for x in flags:
  501. if not isinstance(flags[x], bool):
  502. raise TypeError(f"Flags (f{x}) must be bool but {type(flags[x])}.")
  503. if not hasattr(self, "_mindspore_flags"):
  504. self._mindspore_flags = {}
  505. self._mindspore_flags.update({**flags})
  506. self.__dict__.update({**flags})
  507. return self
    def add_flags_recursive(self, **flags):
        """Apply `add_flags` to this cell and, recursively, to every sub-cell."""
        self.add_flags(**flags)
        if hasattr(self, '_cell_init_args'):
            # Also append the flags to the recorded init-args string.
            self._cell_init_args += str({**flags})
        for cell in self.cells():
            cell.add_flags_recursive(**flags)
        return self
  515. def get_flags(self):
  516. if not hasattr(self, "_mindspore_flags"):
  517. self._mindspore_flags = {}
  518. return self._mindspore_flags
    def to_float(self, dst_type):
        """
        Add cast on all inputs of cell and child cells to run with certain float type.

        If `dst_type` is `mindspore.dtype.float16`, all the inputs of Cell including input, Parameter, Tensor
        as const will be cast to float16. Please refer to the usage in source code of
        `mindspore.train.amp.build_train_network`.

        Note:
            Call multiple times will overwrite the previous.

        Args:
            dst_type (:class:`mindspore.dtype`): Transfer Cell to Run with dst_type.
                dst_type can be `mindspore.dtype.float16` or `mindspore.dtype.float32`.

        Raises:
            ValueError: If dst_type is not float32 or float16.
        """
        if dst_type not in (mstype.float16, mstype.float32):
            raise ValueError("dst_type should inside float32 or float16.")
        # Exactly one of the two flags ends up True; they are mutually exclusive.
        flags = {'fp16': dst_type == mstype.float16, 'fp32': dst_type == mstype.float32}
        self.add_flags_recursive(**flags)
        return self
  538. def set_train(self, mode=True):
  539. """
  540. Sets the cell to training mode.
  541. The cell itself and all children cells will be set to training mode.
  542. Args:
  543. mode (bool): Specifies whether the model is training. Default: True.
  544. """
  545. if mode is False:
  546. self._phase = 'predict'
  547. else:
  548. self._phase = 'train'
  549. self.add_flags_recursive(training=mode)
  550. return self
  551. def set_broadcast_flag(self, mode=True):
  552. """
  553. Set the cell to data_parallel mode.
  554. The cell can be accessed as an attribute using the given name.
  555. Args:
  556. mode (bool): Specifies whether the model is data_parallel. Default: True.
  557. """
  558. self.add_flags_recursive(broadcast_flag=mode)
  559. return self