You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

activation.py 21 kB

5 years ago
123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605
  1. #! /usr/bin/python
  2. # -*- coding: utf-8 -*-
  3. from tensorlayer import logging
  4. import tensorlayer as tl
  5. from tensorlayer.initializers import truncated_normal
  6. from tensorlayer.layers.core import Module
# Public API of this module: learnable activations (the PRelu family)
# and stateless activation layers.
__all__ = [
    'PRelu',
    'PRelu6',
    'PTRelu6',
    'LeakyReLU',
    'LeakyReLU6',
    'LeakyTwiceRelu6',
    'Ramp',
    'Swish',
    'HardTanh',
    'Mish'
]
  19. class PRelu(Module):
  20. """
  21. The :class:`PRelu` class is Parametric Rectified Linear layer.
  22. It follows f(x) = alpha * x for x < 0, f(x) = x for x >= 0,
  23. where alpha is a learned array with the same shape as x.
  24. Parameters
  25. ----------
  26. channel_shared : boolean
  27. If True, single weight is shared by all channels.
  28. in_channels: int
  29. The number of channels of the previous layer.
  30. If None, it will be automatically detected when the layer is forwarded for the first time.
  31. a_init : initializer
  32. The initializer for initializing the alpha(s).
  33. name : None or str
  34. A unique layer name.
  35. Examples
  36. -----------
  37. >>> inputs = tl.layers.Input([10, 5])
  38. >>> prelulayer = tl.layers.PRelu(channel_shared=True)
  39. References
  40. -----------
  41. - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
  42. - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  43. """
  44. def __init__(
  45. self, channel_shared=False, in_channels=None, a_init=truncated_normal(mean=0.0, stddev=0.05), name=None,
  46. data_format='channels_last', dim=2
  47. ):
  48. super(PRelu, self).__init__(name)
  49. self.channel_shared = channel_shared
  50. self.in_channels = in_channels
  51. self.a_init = a_init
  52. self.data_format = data_format
  53. self.dim = dim
  54. if self.channel_shared:
  55. self.build((None, ))
  56. self._built = True
  57. elif self.in_channels is not None:
  58. self.build((None, self.in_channels))
  59. self._built = True
  60. logging.info("PRelu %s: channel_shared: %s" % (self.name, self.channel_shared))
  61. def __repr__(self):
  62. s = ('{classname}(')
  63. s += 'channel_shared={channel_shared},'
  64. s += 'in_channels={in_channels},'
  65. s += 'name={name}'
  66. s += ')'
  67. return s.format(classname=self.__class__.__name__, **self.__dict__)
  68. def build(self, inputs_shape):
  69. if self.channel_shared:
  70. w_shape = (1, )
  71. elif self.data_format == 'channels_last':
  72. w_shape = (self.in_channels, )
  73. elif self.data_format == 'channels_first':
  74. if self.dim == 2:
  75. w_shape = (1, self.in_channels, 1, 1)
  76. elif self.dim == 1:
  77. w_shape = (1, self.in_channels, 1)
  78. elif self.dim == 3:
  79. w_shape = (1, self.in_channels, 1, 1, 1)
  80. else:
  81. raise Exception("Dim should be equal to 1, 2 or 3")
  82. self.alpha_var = self._get_weights("alpha", shape=w_shape, init=self.a_init)
  83. self.relu = tl.ops.ReLU()
  84. self.sigmoid = tl.ops.Sigmoid()
  85. def forward(self, inputs):
  86. if self._forward_state == False:
  87. if self._built == False:
  88. self.build(tl.get_tensor_shape(inputs))
  89. self._built = True
  90. self._forward_state = True
  91. pos = self.relu(inputs)
  92. self.alpha_var_constrained = self.sigmoid(self.alpha_var)
  93. neg = -self.alpha_var_constrained * self.relu(-inputs)
  94. return pos + neg
  95. class PRelu6(Module):
  96. """
  97. The :class:`PRelu6` class is Parametric Rectified Linear layer integrating ReLU6 behaviour.
  98. This Layer is a modified version of the :class:`PRelu`.
  99. This activation layer use a modified version :func:`tl.act.leaky_relu` introduced by the following paper:
  100. `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  101. This activation function also use a modified version of the activation function :func:`tf.nn.relu6` introduced by the following paper:
  102. `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  103. This activation layer push further the logic by adding `leaky` behaviour both below zero and above six.
  104. The function return the following results:
  105. - When x < 0: ``f(x) = alpha_low * x``.
  106. - When x in [0, 6]: ``f(x) = x``.
  107. - When x > 6: ``f(x) = 6``.
  108. Parameters
  109. ----------
  110. channel_shared : boolean
  111. If True, single weight is shared by all channels.
  112. in_channels: int
  113. The number of channels of the previous layer.
  114. If None, it will be automatically detected when the layer is forwarded for the first time.
  115. a_init : initializer
  116. The initializer for initializing the alpha(s).
  117. name : None or str
  118. A unique layer name.
  119. References
  120. -----------
  121. - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
  122. - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  123. - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  124. """
  125. def __init__(
  126. self,
  127. channel_shared=False,
  128. in_channels=None,
  129. a_init=truncated_normal(mean=0.0, stddev=0.05),
  130. name=None, # "prelu6"
  131. data_format='channels_last',
  132. dim=2
  133. ):
  134. super(PRelu6, self).__init__(name)
  135. self.channel_shared = channel_shared
  136. self.in_channels = in_channels
  137. self.a_init = a_init
  138. self.data_format = data_format
  139. self.dim = dim
  140. if self.channel_shared:
  141. self.build((None, ))
  142. self._built = True
  143. elif self.in_channels is not None:
  144. self.build((None, self.in_channels))
  145. self._built = True
  146. logging.info("PRelu6 %s: channel_shared: %s" % (self.name, self.channel_shared))
  147. def __repr__(self):
  148. s = ('{classname}(')
  149. s += 'channel_shared={channel_shared},'
  150. s += 'in_channels={in_channels},'
  151. s += 'name={name}'
  152. s += ')'
  153. return s.format(classname=self.__class__.__name__, **self.__dict__)
  154. def build(self, inputs_shape):
  155. if self.channel_shared:
  156. w_shape = (1, )
  157. elif self.data_format == 'channels_last':
  158. w_shape = (self.in_channels, )
  159. elif self.data_format == 'channels_first':
  160. if self.dim == 2:
  161. w_shape = (1, self.in_channels, 1, 1)
  162. elif self.dim == 1:
  163. w_shape = (1, self.in_channels, 1)
  164. elif self.dim == 3:
  165. w_shape = (1, self.in_channels, 1, 1, 1)
  166. else:
  167. raise Exception("Dim should be equal to 1, 2 or 3")
  168. self.alpha_var = self._get_weights("alpha", shape=w_shape, init=self.a_init)
  169. self.sigmoid = tl.ops.Sigmoid()
  170. self.relu = tl.ops.ReLU()
  171. # @tf.function
  172. def forward(self, inputs):
  173. if self._forward_state == False:
  174. if self._built == False:
  175. self.build(tl.get_tensor_shape(inputs))
  176. self._built = True
  177. self._forward_state = True
  178. alpha_var_constrained = self.sigmoid(self.alpha_var)
  179. pos = self.relu(inputs)
  180. pos_6 = -self.relu(inputs - 6)
  181. neg = -alpha_var_constrained * self.relu(-inputs)
  182. return pos + pos_6 + neg
  183. class PTRelu6(Module):
  184. """
  185. The :class:`PTRelu6` class is Parametric Rectified Linear layer integrating ReLU6 behaviour.
  186. This Layer is a modified version of the :class:`PRelu`.
  187. This activation layer use a modified version :func:`tl.act.leaky_relu` introduced by the following paper:
  188. `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  189. This activation function also use a modified version of the activation function :func:`tf.nn.relu6` introduced by the following paper:
  190. `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  191. This activation layer push further the logic by adding `leaky` behaviour both below zero and above six.
  192. The function return the following results:
  193. - When x < 0: ``f(x) = alpha_low * x``.
  194. - When x in [0, 6]: ``f(x) = x``.
  195. - When x > 6: ``f(x) = 6 + (alpha_high * (x-6))``.
  196. This version goes one step beyond :class:`PRelu6` by introducing leaky behaviour on the positive side when x > 6.
  197. Parameters
  198. ----------
  199. channel_shared : boolean
  200. If True, single weight is shared by all channels.
  201. in_channels: int
  202. The number of channels of the previous layer.
  203. If None, it will be automatically detected when the layer is forwarded for the first time.
  204. a_init : initializer
  205. The initializer for initializing the alpha(s).
  206. name : None or str
  207. A unique layer name.
  208. References
  209. -----------
  210. - `Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification <http://arxiv.org/abs/1502.01852>`__
  211. - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  212. - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  213. """
  214. def __init__(
  215. self,
  216. channel_shared=False,
  217. in_channels=None,
  218. data_format='channels_last',
  219. a_init=truncated_normal(mean=0.0, stddev=0.05),
  220. name=None # "ptrelu6"
  221. ):
  222. super(PTRelu6, self).__init__(name)
  223. self.channel_shared = channel_shared
  224. self.in_channels = in_channels
  225. self.data_format = data_format
  226. self.a_init = a_init
  227. if self.channel_shared:
  228. self.build((None, ))
  229. self._built = True
  230. elif self.in_channels:
  231. self.build((None, self.in_channels))
  232. self._built = True
  233. logging.info("PTRelu6 %s: channel_shared: %s" % (self.name, self.channel_shared))
  234. def __repr__(self):
  235. s = ('{classname}(')
  236. s += 'channel_shared={channel_shared},'
  237. s += 'in_channels={in_channels},'
  238. s += 'name={name}'
  239. s += ')'
  240. return s.format(classname=self.__class__.__name__, **self.__dict__)
  241. def build(self, inputs_shape):
  242. if self.channel_shared:
  243. w_shape = (1, )
  244. elif self.data_format == 'channels_last':
  245. w_shape = (self.in_channels, )
  246. elif self.data_format == 'channels_first':
  247. if self.dim == 2:
  248. w_shape = (1, self.in_channels, 1, 1)
  249. elif self.dim == 1:
  250. w_shape = (1, self.in_channels, 1)
  251. elif self.dim == 3:
  252. w_shape = (1, self.in_channels, 1, 1, 1)
  253. else:
  254. raise Exception("Dim should be equal to 1, 2 or 3")
  255. # Alpha for outputs lower than zeros
  256. self.alpha_low = self._get_weights("alpha_low", shape=w_shape, init=self.a_init)
  257. self.sigmoid = tl.ops.Sigmoid()
  258. self.relu = tl.ops.ReLU()
  259. # Alpha for outputs higher than 6
  260. self.alpha_high = self._get_weights("alpha_high", shape=w_shape, init=self.a_init)
  261. # @tf.function
  262. def forward(self, inputs):
  263. if self._forward_state == False:
  264. if self._built == False:
  265. self.build(tl.get_tensor_shape(inputs))
  266. self._built = True
  267. self._forward_state = True
  268. alpha_low_constrained = self.sigmoid(self.alpha_low)
  269. alpha_high_constrained = self.sigmoid(self.alpha_high)
  270. pos = self.relu(inputs)
  271. pos_6 = -self.relu(inputs - 6) + alpha_high_constrained * self.relu(inputs - 6)
  272. neg = -alpha_low_constrained * self.relu(-inputs)
  273. return pos + pos_6 + neg
  274. class Ramp(Module):
  275. """Ramp activation function.
  276. Reference: [tf.clip_by_value]<https://www.tensorflow.org/api_docs/python/tf/clip_by_value>
  277. Parameters
  278. ----------
  279. x : Tensor
  280. input.
  281. v_min : float
  282. cap input to v_min as a lower bound.
  283. v_max : float
  284. cap input to v_max as a upper bound.
  285. Returns
  286. -------
  287. Tensor
  288. A ``Tensor`` in the same type as ``x``.
  289. """
  290. def __init__(self, v_min=0, v_max=1):
  291. super(Ramp, self).__init__()
  292. self._built = True
  293. self.v_min = v_min
  294. self.v_max = v_max
  295. def forward(self, x):
  296. return tl.ops.clip_by_value(x, clip_value_min=self.v_min, clip_value_max=self.v_max)
  297. class LeakyReLU(Module):
  298. """
  299. This function is a modified version of ReLU, introducing a nonzero gradient for negative input. Introduced by the paper:
  300. `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  301. The function return the following results:
  302. - When x < 0: ``f(x) = alpha_low * x``.
  303. - When x >= 0: ``f(x) = x``.
  304. Parameters
  305. ----------
  306. x : Tensor
  307. Support input type ``float``, ``double``, ``int32``, ``int64``, ``uint8``, ``int16``, or ``int8``.
  308. alpha : float
  309. Slope.
  310. name : str
  311. The function name (optional).
  312. Examples
  313. --------
  314. >>> import tensorlayer as tl
  315. >>> net = tl.layers.Input([10, 200])
  316. >>> net = tl.layers.LeakyReLU(alpha=0.5)(net)
  317. Returns
  318. -------
  319. Tensor
  320. A ``Tensor`` in the same type as ``x``.
  321. References
  322. ----------
  323. - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  324. """
  325. def __init__(self, alpha=0.2):
  326. super(LeakyReLU, self).__init__()
  327. self._built = True
  328. self.alpha = alpha
  329. self._leakyrelu = tl.ops.LeakyReLU(alpha=alpha)
  330. def forward(self, x):
  331. return self._leakyrelu(x)
  332. class LeakyReLU6(Module):
  333. """
  334. This activation function is a modified version :func:`leaky_relu` introduced by the following paper:
  335. `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  336. This activation function also follows the behaviour of the activation function :func:`tf.ops.relu6` introduced by the following paper:
  337. `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  338. The function return the following results:
  339. - When x < 0: ``f(x) = alpha_low * x``.
  340. - When x in [0, 6]: ``f(x) = x``.
  341. - When x > 6: ``f(x) = 6``.
  342. Parameters
  343. ----------
  344. x : Tensor
  345. Support input type ``float``, ``double``, ``int32``, ``int64``, ``uint8``, ``int16``, or ``int8``.
  346. alpha : float
  347. Slope.
  348. name : str
  349. The function name (optional).
  350. Examples
  351. --------
  352. >>> import tensorlayer as tl
  353. >>> net = tl.layers.Input([10, 200])
  354. >>> net = tl.layers.LeakyReLU6(alpha=0.5)(net)
  355. Returns
  356. -------
  357. Tensor
  358. A ``Tensor`` in the same type as ``x``.
  359. References
  360. ----------
  361. - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  362. - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  363. """
  364. def __init__(self, alpha=0.2):
  365. super(LeakyReLU6, self).__init__()
  366. self._built = True
  367. if not (0 < alpha <= 1):
  368. raise ValueError("`alpha` value must be in [0, 1]`")
  369. self.alpha = alpha
  370. self.minimum = tl.ops.Minimum()
  371. self.maximum = tl.ops.Maximum()
  372. def forward(self, x):
  373. return self.minimum(self.maximum(x, self.alpha * x), 6)
  374. class LeakyTwiceRelu6(Module):
  375. """
  376. This activation function is a modified version :func:`leaky_relu` introduced by the following paper:
  377. `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  378. This activation function also follows the behaviour of the activation function :func:`tf.ops.relu6` introduced by the following paper:
  379. `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  380. This function push further the logic by adding `leaky` behaviour both below zero and above six.
  381. The function return the following results:
  382. - When x < 0: ``f(x) = alpha_low * x``.
  383. - When x in [0, 6]: ``f(x) = x``.
  384. - When x > 6: ``f(x) = 6 + (alpha_high * (x-6))``.
  385. Parameters
  386. ----------
  387. x : Tensor
  388. Support input type ``float``, ``double``, ``int32``, ``int64``, ``uint8``, ``int16``, or ``int8``.
  389. alpha_low : float
  390. Slope for x < 0: ``f(x) = alpha_low * x``.
  391. alpha_high : float
  392. Slope for x < 6: ``f(x) = 6 (alpha_high * (x-6))``.
  393. name : str
  394. The function name (optional).
  395. Examples
  396. --------
  397. >>> import tensorlayer as tl
  398. >>> net = tl.layers.Input([10, 200])
  399. >>> net = tl.layers.LeakyTwiceRelu6(alpha_low=0.5, alpha_high=0.2)(net)
  400. Returns
  401. -------
  402. Tensor
  403. A ``Tensor`` in the same type as ``x``.
  404. References
  405. ----------
  406. - `Rectifier Nonlinearities Improve Neural Network Acoustic Models [A. L. Maas et al., 2013] <https://ai.stanford.edu/~amaas/papers/relu_hybrid_icml2013_final.pdf>`__
  407. - `Convolutional Deep Belief Networks on CIFAR-10 [A. Krizhevsky, 2010] <http://www.cs.utoronto.ca/~kriz/conv-cifar10-aug2010.pdf>`__
  408. """
  409. def __init__(self, alpha_low=0.2, alpha_high=0.2):
  410. super(LeakyTwiceRelu6, self).__init__()
  411. self._built = True
  412. if not (0 < alpha_high <= 1):
  413. raise ValueError("`alpha_high` value must be in [0, 1]`")
  414. if not (0 < alpha_low <= 1):
  415. raise ValueError("`alpha_low` value must be in [0, 1]`")
  416. self.alpha_low = alpha_low
  417. self.alpha_high = alpha_high
  418. self.minimum = tl.ops.Minimum()
  419. self.maximum = tl.ops.Maximum()
  420. def forward(self, x):
  421. x_is_above_0 = self.minimum(x, 6 * (1 - self.alpha_high) + self.alpha_high * x)
  422. x_is_below_0 = self.minimum(self.alpha_low * x, 0)
  423. return self.maximum(x_is_above_0, x_is_below_0)
  424. class Swish(Module):
  425. """Swish function.
  426. See `Swish: a Self-Gated Activation Function <https://arxiv.org/abs/1710.05941>`__.
  427. Parameters
  428. ----------
  429. x : Tensor
  430. input.
  431. name: str
  432. function name (optional).
  433. Returns
  434. -------
  435. Tensor
  436. A ``Tensor`` in the same type as ``x``.
  437. """
  438. def __init__(self):
  439. super(Swish, self).__init__()
  440. self.sigmoid = tl.ops.Sigmoid()
  441. self._built = True
  442. def forward(self, x):
  443. return self.sigmoid(x) * x
  444. class HardTanh(Module):
  445. """Hard tanh activation function.
  446. Which is a ramp function with low bound of -1 and upper bound of 1, shortcut is `htanh`.
  447. Parameters
  448. ----------
  449. x : Tensor
  450. input.
  451. name : str
  452. The function name (optional).
  453. Returns
  454. -------
  455. Tensor
  456. A ``Tensor`` in the same type as ``x``.
  457. """
  458. def __init__(self):
  459. super(HardTanh, self).__init__()
  460. self._built = True
  461. def forward(self, x):
  462. return tl.ops.clip_by_value(x, -1, 1)
  463. class Mish(Module):
  464. """Mish activation function.
  465. Reference: [Mish: A Self Regularized Non-Monotonic Neural Activation Function .Diganta Misra, 2019]<https://arxiv.org/abs/1908.08681>
  466. Parameters
  467. ----------
  468. x : Tensor
  469. input.
  470. Returns
  471. -------
  472. Tensor
  473. A ``Tensor`` in the same type as ``x``.
  474. """
  475. def __init__(self):
  476. super(Mish, self).__init__()
  477. self._tanh = tl.ops.Tanh()
  478. self._softplus = tl.ops.Softplus()
  479. self._built = True
  480. def forward(self, x):
  481. return x * self._tanh(self._softplus(x))

TensorLayer 3.0 是一款兼容多种深度学习框架为计算后端的深度学习库,计划兼容 TensorFlow、PyTorch、MindSpore、Paddle。