From a754dea90cde271dd16f4848a882083b590bc077 Mon Sep 17 00:00:00 2001
From: guohongzilong <2713219276@qq.com>
Date: Sun, 20 Sep 2020 20:53:47 +0800
Subject: [PATCH] delete SoftmaxCrossEntropyExpand

---
 mindspore/nn/loss/__init__.py                  |  3 +-
 mindspore/nn/loss/loss.py                      | 64 ------------------
 tests/ut/python/nn/test_loss.py                | 10 ---
 .../test_softmax_cross_entropy_expand.py       | 31 ---------
 4 files changed, 1 insertion(+), 107 deletions(-)
 delete mode 100644 tests/ut/python/parallel/test_softmax_cross_entropy_expand.py

diff --git a/mindspore/nn/loss/__init__.py b/mindspore/nn/loss/__init__.py
index ce5870699b0a..b0ead67f7b8a 100644
--- a/mindspore/nn/loss/__init__.py
+++ b/mindspore/nn/loss/__init__.py
@@ -20,9 +20,8 @@ It shows how well the model works on a dataset and the optimization target which
 """
 
 from .loss import L1Loss, MSELoss, SmoothL1Loss, \
-    SoftmaxCrossEntropyWithLogits, SoftmaxCrossEntropyExpand, CosineEmbeddingLoss
+    SoftmaxCrossEntropyWithLogits, CosineEmbeddingLoss
 
 __all__ = ['L1Loss', 'MSELoss', 'SmoothL1Loss',
            'SoftmaxCrossEntropyWithLogits',
-           'SoftmaxCrossEntropyExpand',
            'CosineEmbeddingLoss']
diff --git a/mindspore/nn/loss/loss.py b/mindspore/nn/loss/loss.py
index f3833ac56d14..bebfbe0b5b46 100644
--- a/mindspore/nn/loss/loss.py
+++ b/mindspore/nn/loss/loss.py
@@ -262,70 +262,6 @@ class SoftmaxCrossEntropyWithLogits(_Loss):
         return self.get_loss(x)
 
 
-class SoftmaxCrossEntropyExpand(Cell):
-    r"""
-    Computes softmax cross entropy between logits and labels. Implemented by expanded formula.
-
-    This is a wrapper of several functions.
-
-    .. math::
-        \ell(x_i, t_i) = -log\left(\frac{\exp(x_{t_i})}{\sum_j \exp(x_j)}\right),
-    where :math:`x_i` is a 1D score Tensor, :math:`t_i` is the target class.
-
-    Note:
-        When argument sparse is set to True, the format of the label is the index
-        ranging from :math:`0` to :math:`C - 1` instead of one-hot vectors.
-
-    Args:
-        sparse(bool): Specifies whether labels use sparse format or not. Default: False.
-
-    Inputs:
-        - **input_data** (Tensor) - Tensor of shape :math:`(x_1, x_2, ..., x_R)`.
-        - **label** (Tensor) - Tensor of shape :math:`(y_1, y_2, ..., y_S)`.
-
-    Outputs:
-        Tensor, a scalar tensor including the mean loss.
-
-    Examples:
-        >>> loss = nn.SoftmaxCrossEntropyExpand(sparse=True)
-        >>> input_data = Tensor(np.ones([64, 512]), dtype=mindspore.float32)
-        >>> label = Tensor(np.ones([64]), dtype=mindspore.int32)
-        >>> loss(input_data, label)
-    """
-    def __init__(self, sparse=False):
-        super(SoftmaxCrossEntropyExpand, self).__init__()
-        self.exp = P.Exp()
-        self.reduce_sum = P.ReduceSum(keep_dims=True)
-        self.onehot = P.OneHot()
-        self.on_value = Tensor(1.0, mstype.float32)
-        self.off_value = Tensor(0.0, mstype.float32)
-        self.div = P.Div()
-        self.log = P.Log()
-        self.sum_cross_entropy = P.ReduceSum(keep_dims=False)
-        self.mul = P.Mul()
-        self.mul2 = P.Mul()
-        self.cast = P.Cast()
-        self.reduce_mean = P.ReduceMean(keep_dims=False)
-        self.sparse = sparse
-        self.reduce_max = P.ReduceMax(keep_dims=True)
-        self.sub = P.Sub()
-
-    def construct(self, logit, label):
-        logit_max = self.reduce_max(logit, -1)
-        exp = self.exp(self.sub(logit, logit_max))
-        exp_sum = self.reduce_sum(exp, -1)
-        softmax_result = self.div(exp, exp_sum)
-        if self.sparse:
-            label = self.onehot(label, F.shape(logit)[1], self.on_value, self.off_value)
-
-        softmax_result_log = self.log(softmax_result)
-        loss = self.sum_cross_entropy((self.mul(softmax_result_log, label)), -1)
-        loss = self.mul2(F.scalar_to_array(-1.0), loss)
-        loss = self.reduce_mean(loss, -1)
-
-        return loss
-
-
 @constexpr
 def _check_reduced_shape_valid(ori_shape, reduced_shape, axis, cls_name):
     validator.check_reduce_shape(ori_shape, reduced_shape, axis, cls_name)
diff --git a/tests/ut/python/nn/test_loss.py b/tests/ut/python/nn/test_loss.py
index 7d2329dcfe2f..f055443dbea6 100644
--- a/tests/ut/python/nn/test_loss.py
+++ b/tests/ut/python/nn/test_loss.py
@@ -17,7 +17,6 @@ import numpy as np
 
 import mindspore.nn as nn
 from mindspore import Tensor
-from mindspore.common.api import _executor
 from ..ut_filter import non_graph_engine
 
 
@@ -54,15 +53,6 @@ def test_SoftmaxCrossEntropyWithLogits_reduce():
     loss(logits, labels)
 
 
-def test_SoftmaxCrossEntropyExpand():
-    from mindspore import context
-    context.set_context(mode=context.GRAPH_MODE)
-    loss = nn.SoftmaxCrossEntropyExpand()
-
-    logits = Tensor(np.random.randint(0, 9, [100, 10]).astype(np.float32))
-    labels = Tensor(np.random.randint(0, 9, [10,]).astype(np.float32))
-    _executor.compile(loss, logits, labels)
-
 def test_cosine_embedding_loss():
     """ test CosineEmbeddingLoss """
     loss = nn.CosineEmbeddingLoss()
diff --git a/tests/ut/python/parallel/test_softmax_cross_entropy_expand.py b/tests/ut/python/parallel/test_softmax_cross_entropy_expand.py
deleted file mode 100644
index 4116a92cee62..000000000000
--- a/tests/ut/python/parallel/test_softmax_cross_entropy_expand.py
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2019 Huawei Technologies Co., Ltd
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import numpy as np
-
-from mindspore import Tensor
-from mindspore import context
-from mindspore.common import dtype as mstype
-from mindspore.common.api import _executor
-from mindspore.nn.loss.loss import SoftmaxCrossEntropyExpand
-
-
-def test_SoftmaxCrossEntropy():
-    net = SoftmaxCrossEntropyExpand(sparse=True)
-    context.set_auto_parallel_context(parallel_mode="auto_parallel")
-    logit = Tensor(np.ones([64, 512]), dtype=mstype.float32)
-    label = Tensor(np.ones([64]), dtype=mstype.int32)
-    context.set_auto_parallel_context(device_num=8, global_rank=0)
-    net.set_auto_parallel()
-    _executor.compile(net, logit, label)
-- 
Gitee
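Migration note: callers of the removed SoftmaxCrossEntropyExpand can switch to the kept nn.SoftmaxCrossEntropyWithLogits. Below is a minimal sketch, assuming the sparse and reduction keyword arguments of the MindSpore API at the time of this patch; it mirrors the example from the removed class's docstring.

    import numpy as np
    import mindspore
    import mindspore.nn as nn
    from mindspore import Tensor

    # Replaces: loss = nn.SoftmaxCrossEntropyExpand(sparse=True)
    # reduction='mean' yields the scalar mean loss the removed class returned.
    loss = nn.SoftmaxCrossEntropyWithLogits(sparse=True, reduction='mean')
    logits = Tensor(np.ones([64, 512]), dtype=mindspore.float32)
    labels = Tensor(np.ones([64]), dtype=mindspore.int32)
    output = loss(logits, labels)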