
delete SoftmaxCrossEntropyExpand

tags/v1.0.0
guohongzilong 5 years ago
parent commit a754dea90c
4 changed files with 1 addition and 107 deletions:

  1. mindspore/nn/loss/__init__.py (+1, -2)
  2. mindspore/nn/loss/loss.py (+0, -64)
  3. tests/ut/python/nn/test_loss.py (+0, -10)
  4. tests/ut/python/parallel/test_softmax_cross_entropy_expand.py (+0, -31)

mindspore/nn/loss/__init__.py (+1, -2)

@@ -20,9 +20,8 @@ It shows how well the model works on a dataset and the optimization target which
"""

from .loss import L1Loss, MSELoss, SmoothL1Loss, \
-    SoftmaxCrossEntropyWithLogits, SoftmaxCrossEntropyExpand, CosineEmbeddingLoss
+    SoftmaxCrossEntropyWithLogits, CosineEmbeddingLoss

__all__ = ['L1Loss', 'MSELoss', 'SmoothL1Loss',
           'SoftmaxCrossEntropyWithLogits',
-           'SoftmaxCrossEntropyExpand',
           'CosineEmbeddingLoss']
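
A note for downstream code: with SoftmaxCrossEntropyExpand no longer exported, the remaining SoftmaxCrossEntropyWithLogits is the natural replacement. A minimal migration sketch, assuming sparse integer labels and that reduction='mean' reproduces the mean loss the removed class returned (shapes taken from the removed docstring example):

import numpy as np
import mindspore
import mindspore.nn as nn
from mindspore import Tensor

# Previously: loss = nn.SoftmaxCrossEntropyExpand(sparse=True)
loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
logits = Tensor(np.ones([64, 512]), mindspore.float32)
labels = Tensor(np.ones([64]), mindspore.int32)
out = loss(logits, labels)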

mindspore/nn/loss/loss.py (+0, -64)

@@ -262,70 +262,6 @@ class SoftmaxCrossEntropyWithLogits(_Loss):
        return self.get_loss(x)


-class SoftmaxCrossEntropyExpand(Cell):
-    r"""
-    Computes softmax cross entropy between logits and labels. Implemented by expanded formula.
-
-    This is a wrapper of several functions.
-
-    .. math::
-        \ell(x_i, t_i) = -log\left(\frac{\exp(x_{t_i})}{\sum_j \exp(x_j)}\right),
-    where :math:`x_i` is a 1D score Tensor, :math:`t_i` is the target class.
-
-    Note:
-        When argument sparse is set to True, the format of the label is the index
-        ranging from :math:`0` to :math:`C - 1` instead of one-hot vectors.
-
-    Args:
-        sparse(bool): Specifies whether labels use sparse format or not. Default: False.
-
-    Inputs:
-        - **input_data** (Tensor) - Tensor of shape :math:`(x_1, x_2, ..., x_R)`.
-        - **label** (Tensor) - Tensor of shape :math:`(y_1, y_2, ..., y_S)`.
-
-    Outputs:
-        Tensor, a scalar tensor including the mean loss.
-
-    Examples:
-        >>> loss = nn.SoftmaxCrossEntropyExpand(sparse=True)
-        >>> input_data = Tensor(np.ones([64, 512]), dtype=mindspore.float32)
-        >>> label = Tensor(np.ones([64]), dtype=mindspore.int32)
-        >>> loss(input_data, label)
-    """
-    def __init__(self, sparse=False):
-        super(SoftmaxCrossEntropyExpand, self).__init__()
-        self.exp = P.Exp()
-        self.reduce_sum = P.ReduceSum(keep_dims=True)
-        self.onehot = P.OneHot()
-        self.on_value = Tensor(1.0, mstype.float32)
-        self.off_value = Tensor(0.0, mstype.float32)
-        self.div = P.Div()
-        self.log = P.Log()
-        self.sum_cross_entropy = P.ReduceSum(keep_dims=False)
-        self.mul = P.Mul()
-        self.mul2 = P.Mul()
-        self.cast = P.Cast()
-        self.reduce_mean = P.ReduceMean(keep_dims=False)
-        self.sparse = sparse
-        self.reduce_max = P.ReduceMax(keep_dims=True)
-        self.sub = P.Sub()
-
-    def construct(self, logit, label):
-        logit_max = self.reduce_max(logit, -1)
-        exp = self.exp(self.sub(logit, logit_max))
-        exp_sum = self.reduce_sum(exp, -1)
-        softmax_result = self.div(exp, exp_sum)
-        if self.sparse:
-            label = self.onehot(label, F.shape(logit)[1], self.on_value, self.off_value)
-
-        softmax_result_log = self.log(softmax_result)
-        loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1)
-        loss = self.mul2(F.scalar_to_array(-1.0), loss)
-        loss = self.reduce_mean(loss, -1)
-
-        return loss
-
-
@constexpr
def _check_reduced_shape_valid(ori_shape, reduced_shape, axis, cls_name):
    validator.check_reduce_shape(ori_shape, reduced_shape, axis, cls_name)
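
For reference, the deleted construct() amounts to a numerically stabilized softmax followed by a mean negative log-likelihood over one-hot labels. A plain NumPy restatement of that expanded formula (illustration only, not part of the MindSpore API):

import numpy as np

def softmax_cross_entropy_expand(logit, label_onehot):
    # Mirrors the removed construct(): subtract the row max for stability,
    # take softmax, then average the per-sample cross entropy.
    logit_max = logit.max(axis=-1, keepdims=True)
    exp = np.exp(logit - logit_max)
    softmax = exp / exp.sum(axis=-1, keepdims=True)
    per_sample = -(np.log(softmax) * label_onehot).sum(axis=-1)
    return per_sample.mean()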


tests/ut/python/nn/test_loss.py (+0, -10)

@@ -17,7 +17,6 @@ import numpy as np

import mindspore.nn as nn
from mindspore import Tensor
-from mindspore.common.api import _executor
from ..ut_filter import non_graph_engine


@@ -54,15 +53,6 @@ def test_SoftmaxCrossEntropyWithLogits_reduce():
    loss(logits, labels)


-def test_SoftmaxCrossEntropyExpand():
-    from mindspore import context
-    context.set_context(mode=context.GRAPH_MODE)
-    loss = nn.SoftmaxCrossEntropyExpand()
-
-    logits = Tensor(np.random.randint(0, 9, [100, 10]).astype(np.float32))
-    labels = Tensor(np.random.randint(0, 9, [10,]).astype(np.float32))
-    _executor.compile(loss, logits, labels)
-
def test_cosine_embedding_loss():
    """ test CosineEmbeddingLoss """
    loss = nn.CosineEmbeddingLoss()


tests/ut/python/parallel/test_softmax_cross_entropy_expand.py (+0, -31)

@@ -1,31 +0,0 @@
-# Copyright 2019 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import numpy as np
-
-from mindspore import Tensor
-from mindspore import context
-from mindspore.common import dtype as mstype
-from mindspore.common.api import _executor
-from mindspore.nn.loss.loss import SoftmaxCrossEntropyExpand
-
-
-def test_SoftmaxCrossEntropy():
-    net = SoftmaxCrossEntropyExpand(sparse=True)
-    context.set_auto_parallel_context(parallel_mode="auto_parallel")
-    logit = Tensor(np.ones([64, 512]), dtype=mstype.float32)
-    label = Tensor(np.ones([64]), dtype=mstype.int32)
-    context.set_auto_parallel_context(device_num=8, global_rank=0)
-    net.set_auto_parallel()
-    _executor.compile(net, logit, label)
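
If the auto-parallel compilation coverage from this deleted test is still wanted elsewhere, a comparable case could target the surviving loss class. A sketch under that assumption, reusing the removed test's shapes, parallel settings, and the internal _executor helper it relied on (the test name is hypothetical):

import numpy as np
from mindspore import Tensor, context
from mindspore.common import dtype as mstype
from mindspore.common.api import _executor
from mindspore.nn.loss.loss import SoftmaxCrossEntropyWithLogits


def test_softmax_cross_entropy_auto_parallel():
    # Same scenario as the removed test, with SoftmaxCrossEntropyWithLogits
    # standing in for the deleted SoftmaxCrossEntropyExpand.
    net = SoftmaxCrossEntropyWithLogits(sparse=True)
    context.set_auto_parallel_context(parallel_mode="auto_parallel", device_num=8, global_rank=0)
    logit = Tensor(np.ones([64, 512]), dtype=mstype.float32)
    label = Tensor(np.ones([64]), dtype=mstype.int32)
    net.set_auto_parallel()
    _executor.compile(net, logit, label)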
