From 8d17a2b8aff7fa5bd2208a21d0ba4d92e2d68788 Mon Sep 17 00:00:00 2001 From: Wei Luning Date: Tue, 15 Sep 2020 11:43:07 +0800 Subject: [PATCH] * add doc for some Parameter property.* add export in quant __all__* fix bug for isinstance & add ut for it --- mindspore/common/dtype.py | 2 ++ mindspore/common/parameter.py | 19 ++++++++++++--- mindspore/core/ir/dtype/number.h | 21 +++++++++++++--- mindspore/ops/operations/array_ops.py | 7 +++++- mindspore/train/model.py | 5 +++- mindspore/train/quant/__init__.py | 4 ++-- .../pynative_mode/ops/test_isinstance.py | 24 +++++++++++++++++++ 7 files changed, 72 insertions(+), 10 deletions(-) create mode 100644 tests/ut/python/pynative_mode/ops/test_isinstance.py diff --git a/mindspore/common/dtype.py b/mindspore/common/dtype.py index b940d1b879..8351bfb51c 100644 --- a/mindspore/common/dtype.py +++ b/mindspore/common/dtype.py @@ -34,6 +34,8 @@ __dtype__ = [ "float64", "double", "bool_", "float_", "list_", "tuple_", + "int_", "uint", + "number", "tensor", "string", "type_none", "tensor_type", "Type" diff --git a/mindspore/common/parameter.py b/mindspore/common/parameter.py index 69cdee9a1a..ceb2c7a135 100644 --- a/mindspore/common/parameter.py +++ b/mindspore/common/parameter.py @@ -180,7 +180,12 @@ class Parameter(MetaTensor): @property def inited_param(self): - """Get the new parameter after call the init_data.""" + """ + Get the new parameter after calling `init_data`. + + Default is None. If `self` is a Parameter without data, the initialized + Parameter with data will be recorded here after calling `init_data`. + """ return self._inited_param @@ -232,7 +237,14 @@ class Parameter(MetaTensor): @property def is_init(self): - """Get the initialization status of the parameter.""" + """ + Get the initialization status of the parameter. + + In the GE backend, the Parameter needs an "init graph" to sync the data from host to device. + This flag indicates whether the data has been synced to the device.
+ + This flag only works in GE, and it will be set to False in other backends. + """ return self._is_init @is_init.setter @@ -250,7 +262,8 @@ class Parameter(MetaTensor): Clone the parameter. Args: - prefix (str): Namespace of parameter. + prefix (str): Namespace of parameter. The cloned Parameter name is + combined from the prefix and the current name: `f"{prefix}.{self.name}"`. init (Union[Tensor, str, Initializer, numbers.Number]): Initialize the shape of the parameter. Default: 'same'. diff --git a/mindspore/core/ir/dtype/number.h b/mindspore/core/ir/dtype/number.h index ae7d65419b..d753546b2e 100644 --- a/mindspore/core/ir/dtype/number.h +++ b/mindspore/core/ir/dtype/number.h @@ -92,7 +92,12 @@ class Int : public Number { ~Int() override = default; MS_DECLARE_PARENT(Int, Number) TypeId generic_type_id() const override { return kNumberTypeInt; } - TypePtr DeepCopy() const override { return std::make_shared<Int>(nbits()); } + TypePtr DeepCopy() const override { + if (nbits() == 0) { + return std::make_shared<Int>(); + } + return std::make_shared<Int>(nbits()); + } std::string ToString() const override { return GetTypeName("Int"); } std::string ToReprString() const override { return nbits() == 0 ?
"int_" : GetTypeName("int"); } std::string DumpText() const override { @@ -110,7 +115,12 @@ class UInt : public Number { ~UInt() override {} MS_DECLARE_PARENT(UInt, Number) - TypePtr DeepCopy() const override { return std::make_shared(nbits()); } + TypePtr DeepCopy() const override { + if (nbits() == 0) { + return std::make_shared(); + } + return std::make_shared(nbits()); + } std::string ToString() const override { return GetTypeName("UInt"); } std::string ToReprString() const override { return GetTypeName("uint"); } std::string DumpText() const override { @@ -127,7 +137,12 @@ class Float : public Number { MS_DECLARE_PARENT(Float, Number) TypeId generic_type_id() const override { return kNumberTypeFloat; } - TypePtr DeepCopy() const override { return std::make_shared(nbits()); } + TypePtr DeepCopy() const override { + if (nbits() == 0) { + return std::make_shared(); + } + return std::make_shared(nbits()); + } std::string ToString() const override { return GetTypeName("Float"); } std::string ToReprString() const override { return nbits() == 0 ? 
"float_" : GetTypeName("float"); } std::string DumpText() const override { diff --git a/mindspore/ops/operations/array_ops.py b/mindspore/ops/operations/array_ops.py index c566520680..e703ec4da3 100644 --- a/mindspore/ops/operations/array_ops.py +++ b/mindspore/ops/operations/array_ops.py @@ -349,7 +349,12 @@ class IsInstance(PrimitiveWithInfer): validator.check_const_input("inst", inst['value'], self.name) validator.check_value_type("type_", type_v, [mstype.Type], self.name) - value = mstype.issubclass_(sub_type_t, type_v) + if type_v == mstype.list_: + value = isinstance(sub_type_t, list) + elif type_v == mstype.tuple_: + value = isinstance(sub_type_t, tuple) + else: + value = mstype.issubclass_(sub_type_t, type_v) out = {'shape': (), 'dtype': mstype.type_type, diff --git a/mindspore/train/model.py b/mindspore/train/model.py index a523d44f9a..a0136cbe95 100755 --- a/mindspore/train/model.py +++ b/mindspore/train/model.py @@ -70,11 +70,14 @@ class Model: value would be passed to the `Loss` metric, the predicted value and label would be passed to other metric. Default: None. amp_level (str): Option for argument `level` in `mindspore.amp.build_train_network`, level for mixed - precision training. Supports [O0, O2, O3]. Default: "O0". + precision training. Supports ["O0", "O2", "O3", "auto"]. Default: "O0". - O0: Do not change. - O2: Cast network to float16, keep batchnorm run in float32, using dynamic loss scale. - O3: Cast network to float16, with additional property 'keep_batchnorm_fp32=False'. + - auto: Set to level to recommended level in different devices. Set level to O2 on GPU, Set + level to O3 Ascend. The recommended level is choose by the export experience, cannot + always generalize. User should specify the level for special network. O2 is recommended on GPU, O3 is recommended on Ascend. 
diff --git a/mindspore/train/quant/__init__.py b/mindspore/train/quant/__init__.py index aff8dcfc82..39c1cb1d4b 100644 --- a/mindspore/train/quant/__init__.py +++ b/mindspore/train/quant/__init__.py @@ -21,6 +21,6 @@ operations. Note that the entire computation is carried out in floating point. A aware training, MindSpore provides conversion functions to convert the trained model into lower precision. """ -from .quant import convert_quant_network +from .quant import convert_quant_network, export -__all__ = ["convert_quant_network"] +__all__ = ["convert_quant_network", "export"] diff --git a/tests/ut/python/pynative_mode/ops/test_isinstance.py b/tests/ut/python/pynative_mode/ops/test_isinstance.py new file mode 100644 index 0000000000..b1c62c4cca --- /dev/null +++ b/tests/ut/python/pynative_mode/ops/test_isinstance.py @@ -0,0 +1,24 @@ +# Copyright 2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ +""" test_isinstance """ +import mindspore as ms +from mindspore.ops import operations as P + + +def test_isinstance(): +    assert P.IsInstance()([1, 2, 3], ms.list_) is True +    assert P.IsInstance()((1, 2, 3), ms.tuple_) is True +    assert P.IsInstance()(1.0, ms.float_) is True +    assert P.IsInstance()(1, ms.int_) is True