
!9967 revert dropout mindir

From: @jjfeing
Reviewed-by: @kisnwang, @zhoufeng54
Signed-off-by: @zhoufeng54
Tag: v1.1.0
Committed by mindspore-ci-bot, 5 years ago
Parent commit: 185cdaf01b
4 changed files with 42 additions and 16 deletions:
  1. mindspore/ccsrc/backend/session/ascend_session.cc (+0, -6)
  2. mindspore/nn/layer/basic.py (+20, -4)
  3. mindspore/nn/layer/pooling.py (+16, -2)
  4. mindspore/nn/loss/loss.py (+6, -4)

mindspore/ccsrc/backend/session/ascend_session.cc (+0, -6)

@@ -439,17 +439,11 @@ void AscendSession::UnifyMindIR(const KernelGraphPtr &graph) {
   }
   auto optimizer = std::make_shared<opt::GraphOptimizer>();
   auto unify_mindir_pm = std::make_shared<opt::PassManager>("unify_mindir_pm");
-  unify_mindir_pm->AddPass(std::make_shared<opt::DropoutGradUnifyMindIR>());
-  unify_mindir_pm->AddPass(std::make_shared<opt::DropoutUnifyMindIR>());
-  unify_mindir_pm->AddPass(std::make_shared<opt::MaxPool2MaxPoolWithArgmax>());
   unify_mindir_pm->AddPass(std::make_shared<opt::MaxPoolWithArgmaxUnifyMindIR>());
   unify_mindir_pm->AddPass(std::make_shared<opt::MaxPoolGradWithArgmaxUnifyMindIR>());
   unify_mindir_pm->AddPass(std::make_shared<opt::Conv2DUnifyMindIR>());
   unify_mindir_pm->AddPass(std::make_shared<opt::Conv2DBackpropInputUnifyMindIR>());
   unify_mindir_pm->AddPass(std::make_shared<opt::Conv2DBackpropFilterUnifyMindIR>());
-  unify_mindir_pm->AddPass(std::make_shared<opt::SparseSoftmaxCrossEntropyWithLogitsUnifyMindIR>());
-  unify_mindir_pm->AddPass(std::make_shared<opt::GradSparseSoftmaxCrossEntropyWithLogitsUnifyMindIR>());
-  unify_mindir_pm->AddPass(std::make_shared<opt::GradSparseSoftmaxCrossEntropyWithLogitsUnifyMindIRV2>());
 
   optimizer->AddPassManager(unify_mindir_pm);
   (void)optimizer->Optimize(graph);
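For context: DropoutGradUnifyMindIR and DropoutUnifyMindIR were the passes that lowered P.Dropout to the DropoutGenMask/DropoutDoMask pair at the graph level. With them reverted, that decomposition moves back into nn.Dropout (see basic.py below). A minimal op-level sketch of the same decomposition, assuming device_target="Ascend" since DropoutGenMask/DropoutDoMask are Ascend kernels; values are illustrative:

import numpy as np
import mindspore.ops.operations as P
from mindspore import Tensor
from mindspore.common import dtype as mstype

x = Tensor(np.ones((2, 4)).astype(np.float32))
keep_prob = Tensor(0.8, mstype.float32)        # probability of keeping an element

gen_mask = P.DropoutGenMask(Seed0=0, Seed1=0)  # random byte mask for a given shape
do_mask = P.DropoutDoMask()                    # applies the mask, rescales kept values by 1/keep_prob

mask = gen_mask((2, 4), keep_prob)             # shape is passed as a tuple
out = do_mask(x, mask, keep_prob)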


mindspore/nn/layer/basic.py (+20, -4)

@@ -141,21 +141,37 @@ class Dropout(Cell):
             raise ValueError("dropout probability should be a number in range (0, 1], but got {}".format(keep_prob))
         Validator.check_subclass("dtype", dtype, mstype.number_type, self.cls_name)
         Validator.check_value_type('keep_prob', keep_prob, [float], self.cls_name)
+        self.keep_prob = keep_prob
         seed0, seed1 = _get_graph_seed(0, "dropout")
         self.seed0 = seed0
         self.seed1 = seed1
-        self.keep_prob = keep_prob
-        self.dropout = P.Dropout(keep_prob, seed0, seed1)
         self.dtype = dtype
+        self.get_shape = P.Shape()
+        self.dropout_gen_mask = P.DropoutGenMask(Seed0=self.seed0, Seed1=self.seed1)
+        self.dropout_do_mask = P.DropoutDoMask()
+        self.cast = P.Cast()
+        self.is_ascend = context.get_context('device_target') in ["Ascend"]
+        self.dropout = P.Dropout(keep_prob)
 
     def construct(self, x):
         if not self.training:
             return x
+
+        if not self.is_ascend:
+            out, _ = self.dropout(x)
+            return out
 
         if self.keep_prob == 1:
             return x
 
-        out, _ = self.dropout(x)
-        return out
+        shape = self.get_shape(x)
+        dtype = P.DType()(x)
+        if _is_float_dtype(dtype):
+            keep_prob = self.cast(self.keep_prob, dtype)
+        else:
+            keep_prob = self.cast(self.keep_prob, mstype.float16)
+        output = self.dropout_gen_mask(shape, keep_prob)
+        return self.dropout_do_mask(x, output, keep_prob)
 
     def extend_repr(self):
         return 'keep_prob={}, dtype={}'.format(self.keep_prob, self.dtype)
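User-visible behavior is unchanged by the revert: nn.Dropout masks activations only in training mode and is an identity in inference mode. A quick usage sketch with illustrative values:

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

net = nn.Dropout(keep_prob=0.8)                 # each element kept with probability 0.8
x = Tensor(np.ones((2, 4)).astype(np.float32))

net.set_train()                                 # training: kept values are scaled by 1/keep_prob
y_train = net(x)

net.set_train(False)                            # inference: input is returned unchanged
y_eval = net(x)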


mindspore/nn/layer/pooling.py (+16, -2)

@@ -124,9 +124,16 @@ class MaxPool2d(_PoolNd):
                                   strides=self.stride,
                                   padding=self.pad_mode,
                                   data_format=self.format)
+        self.max_pool_with_arg_max = P.MaxPoolWithArgmax(ksize=self.kernel_size,
+                                                         strides=self.stride,
+                                                         padding=self.pad_mode)
+        self.is_tbe = context.get_context("device_target") == "Ascend"
 
     def construct(self, x):
-        out = self.max_pool(x)
+        if self.is_tbe and self.training:
+            out = self.max_pool_with_arg_max(x)[0]
+        else:
+            out = self.max_pool(x)
         return out
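Both branches produce the same pooled output; MaxPoolWithArgmax also returns the argmax indices needed by the TBE backward kernel, so the forward pass keeps only element [0]. A usage sketch with illustrative shapes:

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

pool = nn.MaxPool2d(kernel_size=3, stride=2, pad_mode="valid")
x = Tensor(np.random.rand(1, 3, 32, 32).astype(np.float32))  # NCHW
y = pool(x)  # (1, 3, 15, 15), since floor((32 - 3) / 2) + 1 = 15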


@@ -191,15 +198,22 @@ class MaxPool1d(_PoolNd):
         self.max_pool = P.MaxPool(ksize=self.kernel_size,
                                   strides=self.stride,
                                   padding=self.pad_mode)
+        self.max_pool_with_arg_max = P.MaxPoolWithArgmax(ksize=self.kernel_size,
+                                                         strides=self.stride,
+                                                         padding=self.pad_mode)
         self.shape = F.shape
         self.reduce_mean = P.ReduceMean(keep_dims=True)
         self.expand = P.ExpandDims()
         self.squeeze = P.Squeeze(2)
+        self.is_tbe = context.get_context("device_target") == "Ascend"
 
     def construct(self, x):
         _shape_check(self.shape(x))
         x = self.expand(x, 2)
-        output = self.max_pool(x)
+        if self.is_tbe and self.training:
+            output = self.max_pool_with_arg_max(x)[0]
+        else:
+            output = self.max_pool(x)
         output = self.squeeze(output)
         return output
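MaxPool1d reuses the 2-D pooling kernels by inserting a dummy spatial axis: ExpandDims at axis 2 turns (N, C, L) into (N, C, 1, L), pooling runs over the last axis, and Squeeze(2) restores the 1-D layout. Illustrative shapes, assuming the default pad_mode="valid":

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor

pool = nn.MaxPool1d(kernel_size=3, stride=2)
x = Tensor(np.random.rand(1, 4, 16).astype(np.float32))  # (N, C, L)
y = pool(x)  # (1, 4, 7): (1, 4, 16) -> (1, 4, 1, 16) -> pool -> (1, 4, 1, 7) -> squeeze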



mindspore/nn/loss/loss.py (+6, -4)

@@ -267,13 +267,15 @@ class SoftmaxCrossEntropyWithLogits(_Loss):
         self.on_value = Tensor(1.0, mstype.float32)
         self.off_value = Tensor(0., mstype.float32)
         self.is_cpugpu = context.get_context('device_target') in ["CPU", "GPU"]
-        self.sparse_softmax_cross_entropy = P.SparseSoftmaxCrossEntropyWithLogits()
+        if self.is_cpugpu:
+            self.sparse_softmax_cross_entropy = P.SparseSoftmaxCrossEntropyWithLogits()
 
     def construct(self, logits, labels):
+        if self.is_cpugpu and self.sparse and self.reduction == 'mean':
+            x = self.sparse_softmax_cross_entropy(logits, labels)
+            return x
+
         if self.sparse:
-            if self.reduction == 'mean':
-                x = self.sparse_softmax_cross_entropy(logits, labels)
-                return x
             labels = self.one_hot(labels, F.shape(logits)[-1], self.on_value, self.off_value)
         x = self.softmax_cross_entropy(logits, labels)[0]
         return self.get_loss(x)
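After this change, only CPU and GPU take the fused fast path when sparse=True and reduction='mean'; Ascend falls back to one-hot encoding plus SoftmaxCrossEntropyWithLogits, with the same result. Usage sketch with illustrative values:

import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype

loss_fn = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
logits = Tensor(np.random.randn(4, 10).astype(np.float32))  # (batch, num_classes)
labels = Tensor(np.array([1, 0, 3, 9]), mstype.int32)       # class indices, not one-hot
loss = loss_fn(logits, labels)                              # scalar mean loss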

