Browse Source

fix bug in parameter init

tags/v0.7.0-beta
Wei Luning 5 years ago
parent
commit
051b019c96
4 changed files with 35 additions and 13 deletions
  1. +9
    -11
      mindspore/ccsrc/pipeline/jit/pipeline.cc
  2. +2
    -0
      mindspore/common/parameter.py
  3. +1
    -1
      mindspore/train/callback/_summary_collector.py
  4. +23
    -1
      tests/ut/python/nn/test_parameter.py

+ 9
- 11
mindspore/ccsrc/pipeline/jit/pipeline.cc View File

@@ -752,19 +752,17 @@ py::object ExecutorPy::Run(const py::tuple &args, const py::object &phase) {
return ExecDFGraph(info_, args, phase_s);
}
#else
if (backend == "ms" || backend == "ge") {
auto ret_val = std::make_shared<py::object>();
if (info_.count(phase_s) != 0 && info_[phase_s]->func_graph != nullptr) {
if (IsGraphOutputValueNodeOrParameter(info_[phase_s]->func_graph->output(), args, ret_val)) {
return *ret_val;
}
auto ret_val = std::make_shared<py::object>();
if (info_.count(phase_s) != 0 && info_[phase_s]->func_graph != nullptr) {
if (IsGraphOutputValueNodeOrParameter(info_[phase_s]->func_graph->output(), args, ret_val)) {
return *ret_val;
}
if (backend == "ge") {
if (args.size() > 0) {
return args[0];
}
return args;
}
if (backend == "ge") {
if (args.size() > 0) {
return args[0];
}
return args;
}
#endif
std::size_t full_arg_size = ArgListSize(phase_s);


+ 2
- 0
mindspore/common/parameter.py View File

@@ -389,6 +389,8 @@ class Parameter(MetaTensor):
raise RuntimeError("Must set or change parallel mode before any Initializer created.")
if self.init_mode is None:
return self
if self.inited_param is not None:
return self.inited_param
if layout is not None:
if not isinstance(layout, list):
raise TypeError("The layout should be list! layout is {}.".format(layout))


+ 1
- 1
mindspore/train/callback/_summary_collector.py View File

@@ -111,7 +111,7 @@ class SummaryCollector(Callback):
and float. Default: None, it means there is no custom data.
collect_tensor_freq (Optional[int]): The same semantics as the `collect_freq`, but controls TensorSummary only.
Because TensorSummary data is too large to be compared with other summary data, this parameter is used to
reduce its collection. By default, The maximum number of steps for collecting TensorSummary data is 21,
reduce its collection. By default, the maximum number of steps for collecting TensorSummary data is 20,
but it will not exceed the number of steps for collecting other summary data.
Default: None, which means to follow the behavior as described above. For example, given `collect_freq=10`,
when the total steps is 600, TensorSummary will be collected 20 steps, while other summary data 61 steps,


+ 23
- 1
tests/ut/python/nn/test_parameter.py View File

@@ -17,7 +17,7 @@
import numpy as np
import pytest

from mindspore import context, Tensor, Parameter, ParameterTuple
from mindspore import context, Tensor, Parameter, ParameterTuple, nn
from mindspore._checkparam import _check_str_by_regular
from mindspore.common import dtype as mstype
from mindspore.common.initializer import initializer
@@ -229,3 +229,25 @@ def test_parameter_lazy_init():
para.set_parameter_data(initializer('ones', [1, 2], mstype.float32), slice_shape=True)
assert np.array_equal(para.default_input.asnumpy(), np.ones((1, 2)))
context.reset_auto_parallel_context()


def test_parameter_as_output():
context.reset_auto_parallel_context()
context.set_auto_parallel_context(parallel_mode="semi_auto_parallel")
initial_input = initializer('One', shape=(2,), dtype=mstype.int32)
updated_input = Tensor([2, 2], mstype.int32)
class Net(nn.Cell):
def __init__(self, initial, updated):
super().__init__()
self.initial = initial
self.updated = updated
self.p = Parameter(self.initial, name="weight")
self.new_p = self.p.init_data()
self.new_p.set_parameter_data(self.updated)
def construct(self):
return self.new_p

net = Net(initial_input, updated_input)
output = net()
assert np.array_equal(output.asnumpy(), np.array([2, 2], np.int32))
context.reset_auto_parallel_context()

Loading…
Cancel
Save