Browse Source

modify redundant code

pull/15404/head
changzherui 4 years ago
parent
commit
5c0af5746e
7 changed files with 5 additions and 13 deletions
  1. +3
    -1
      mindspore/_check_deps_version.py
  2. +1
    -3
      mindspore/_check_version.py
  3. +0
    -3
      mindspore/common/parameter.py
  4. +0
    -1
      mindspore/context.py
  5. +0
    -1
      mindspore/core/ops/adam.cc
  6. +0
    -1
      mindspore/core/ops/assert.cc
  7. +1
    -3
      mindspore/nn/layer/thor_layer.py

+ 3
- 1
mindspore/_check_deps_version.py View File

@@ -34,6 +34,7 @@ def parse_args():
args = parser.parse_args()
return args


def check_deps_version(mindspore_version, supported_version):
"""
check te/hccl/topi version
@@ -62,17 +63,18 @@ def check_deps_version(mindspore_version, supported_version):
print(f"MindSpore version {mindspore_version} and \"topi\" wheel package version {v} does not "
"match, reference to the match info on: https://www.mindspore.cn/install")

# pylint: disable=broad-except
except Exception as e:
print("CheckFailed: ", e.args)
print("MindSpore relies on the 3 whl packages of \"te\", \"topi\" and \"hccl\" in the \"fwkacllib\" "
"folder of the Ascend 910 AI software package, please check whether they are installed "
"correctly or not, reference to the match info on: https://www.mindspore.cn/install")


def main():
args = parse_args()
check_deps_version(args.mindspore_version, args.supported_version)


if __name__ == "__main__":
sys.path = sys.path[1:] # avoid the impact of relative path env, only affect this process
main()

+ 1
- 3
mindspore/_check_version.py View File

@@ -292,9 +292,7 @@ class AscendEnvChecker(EnvChecker):
return

try:
# pylint: disable=unused-import
import te
# pylint: disable=broad-except
except Exception:
if Path(self.tbe_path).is_dir():
if os.getenv('LD_LIBRARY_PATH'):
@@ -373,6 +371,7 @@ class AscendEnvChecker(EnvChecker):
return self.v
return self.v


def check_version_and_env_config():
"""check version and env config"""
if __package_name__.lower() == "mindspore-ascend":
@@ -384,7 +383,6 @@ def check_version_and_env_config():
return

try:
# pylint: disable=unused-import
from . import _c_expression
# check version of ascend site or cuda
env_checker.check_version()


+ 0
- 3
mindspore/common/parameter.py View File

@@ -350,7 +350,6 @@ class Parameter(Tensor_):
Parameter, a new parameter.
"""
x = copy(self)
# pylint: disable=protected-access
x.param_info = self.param_info.clone()
x.is_init = False
x.init = self.init
@@ -426,11 +425,9 @@ class Parameter(Tensor_):
def _update_tensor_data(self, data):
"Update the parameter by a Tensor."
if isinstance(self, Tensor):
# for Tensor same shape:
self.init_flag = False
self.init = None
return self.assign_value(data)
# create a new tensor
new_param = Parameter(data, self.name, self.requires_grad)
new_param.param_info = self.param_info
return new_param


+ 0
- 1
mindspore/context.py View File

@@ -238,7 +238,6 @@ class _Context:
graph_memory_max_size = _DEVICE_APP_MEMORY_SIZE - int(variable_memory_max_size[:-2])
graph_memory_max_size_ = str(graph_memory_max_size) + " * 1024 * 1024 * 1024"
self.set_param(ms_ctx_param.variable_memory_max_size, variable_memory_max_size_)
# pylint: disable=protected-access
self.set_param(ms_ctx_param._graph_memory_max_size, graph_memory_max_size_)

def set_max_device_memory(self, max_device_memory):


+ 0
- 1
mindspore/core/ops/adam.cc View File

@@ -43,7 +43,6 @@ abstract::AbstractBasePtr AdamInfer(const PrimitivePtr &primitive, const std::ve
auto infer_m_type = CheckAndConvertUtils::CheckTensorTypeValid("m_type", m_type, common_valid_types, prim_name);
auto infer_v_type = CheckAndConvertUtils::CheckTensorTypeValid("v_type", v_type, common_valid_types, prim_name);
(void)CheckAndConvertUtils::CheckTensorTypeValid("grad_type", grad_type, common_valid_types, prim_name);
// auto infer_grad_type = grad_type->cast<TensorTypePtr>()->element();
auto output0 = std::make_shared<abstract::AbstractTensor>(infer_var_type, var_shape);
auto output1 = std::make_shared<abstract::AbstractTensor>(infer_m_type, m_shape);
auto output2 = std::make_shared<abstract::AbstractTensor>(infer_v_type, v_shape);


+ 0
- 1
mindspore/core/ops/assert.cc View File

@@ -52,7 +52,6 @@ AbstractBasePtr AssertInfer(const abstract::AnalysisEnginePtr &, const Primitive
if (condition_shape[0] == 1) {
auto condition_value = reinterpret_cast<bool *>(input_args[0]->BuildValue()->cast<tensor::TensorPtr>()->data_c());
MS_EXCEPTION_IF_NULL(condition_value);
// auto condition_value = GetValue<bool>(input_args[0]->BuildValue());
CheckAndConvertUtils::CheckInteger("condition[0]", *condition_value, kEqual, 1, op_name);
}
condition = input_args[0]->BuildType();


+ 1
- 3
mindspore/nn/layer/thor_layer.py View File

@@ -184,10 +184,9 @@ class Dense_Thor(Cell):
s = 'input_channels={}, output_channels={}'.format(self.in_channels, self.out_channels)
if self.has_bias:
s += ', has_bias={}'.format(self.has_bias)
# if self.activation_flag:
# s += ', activation={}'.format(self.activation)
return s


class _Conv(Cell):
"""
Applies a N-D convolution over an input signal composed of several input planes.
@@ -212,7 +211,6 @@ class _Conv(Cell):
self.kernel_size = kernel_size
self.stride = stride
self.pad_mode = pad_mode
# self.weight_init = weight_init
self.bias_init = bias_init
if isinstance(padding, int):
Validator.check_non_negative_int(padding, 'padding', self.cls_name)


Loading…
Cancel
Save