Browse Source

!15409 Add indexes attribute for metrics and make indexes aware

From: @zhiqwang
Reviewed-by: @kingxian,@kingxian
Signed-off-by: @kingxian,@kingxian
pull/15409/MERGE
mindspore-ci-bot Gitee 5 years ago
parent
commit
975e447737
22 changed files with 139 additions and 19 deletions
  1. +2
    -1
      mindspore/nn/metrics/accuracy.py
  2. +2
    -1
      mindspore/nn/metrics/bleu_score.py
  3. +3
    -1
      mindspore/nn/metrics/confusion_matrix.py
  4. +2
    -1
      mindspore/nn/metrics/cosine_similarity.py
  5. +2
    -1
      mindspore/nn/metrics/dice.py
  6. +3
    -1
      mindspore/nn/metrics/error.py
  7. +3
    -2
      mindspore/nn/metrics/fbeta.py
  8. +2
    -1
      mindspore/nn/metrics/hausdorff_distance.py
  9. +2
    -1
      mindspore/nn/metrics/loss.py
  10. +2
    -1
      mindspore/nn/metrics/mean_surface_distance.py
  11. +28
    -1
      mindspore/nn/metrics/metric.py
  12. +2
    -1
      mindspore/nn/metrics/occlusion_sensitivity.py
  13. +2
    -1
      mindspore/nn/metrics/perplexity.py
  14. +2
    -1
      mindspore/nn/metrics/precision.py
  15. +2
    -1
      mindspore/nn/metrics/recall.py
  16. +2
    -1
      mindspore/nn/metrics/roc.py
  17. +2
    -1
      mindspore/nn/metrics/root_mean_square_surface_distance.py
  18. +2
    -1
      mindspore/nn/metrics/topk.py
  19. +12
    -0
      tests/ut/python/metrics/test_accuracy.py
  20. +36
    -0
      tests/ut/python/metrics/test_metric_factory.py
  21. +13
    -0
      tests/ut/python/metrics/test_occlusion_sensitivity.py
  22. +13
    -0
      tests/ut/python/metrics/test_root_mean_square_distance.py

+ 2
- 1
mindspore/nn/metrics/accuracy.py View File

@@ -14,7 +14,7 @@
# ============================================================================
"""Accuracy."""
import numpy as np
from .metric import EvaluationBase
from .metric import EvaluationBase, rearrange_inputs


class Accuracy(EvaluationBase):
@@ -54,6 +54,7 @@ class Accuracy(EvaluationBase):
self._total_num = 0
self._class_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result :math:`y_{pred}` and :math:`y`.


+ 2
- 1
mindspore/nn/metrics/bleu_score.py View File

@@ -16,7 +16,7 @@
from collections import Counter
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class BleuScore(Metric):
@@ -82,6 +82,7 @@ class BleuScore(Metric):

return ngram_counter

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result with `candidate_corpus` and `reference_corpus`.


+ 3
- 1
mindspore/nn/metrics/confusion_matrix.py View File

@@ -15,7 +15,7 @@
"""ConfusionMatrixMetric & ConfusionMatrix."""
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class ConfusionMatrix(Metric):
@@ -77,6 +77,7 @@ class ConfusionMatrix(Metric):
self.confusion_matrix = np.zeros((self.num_classes, self.num_classes))
self._is_update = False

@rearrange_inputs
def update(self, *inputs):
"""
Update state with y_pred and y.
@@ -210,6 +211,7 @@ class ConfusionMatrixMetric(Metric):
self._total_tn = 0.0
self._total_fn = 0.0

@rearrange_inputs
def update(self, *inputs):
"""
Update state with predictions and targets.


+ 2
- 1
mindspore/nn/metrics/cosine_similarity.py View File

@@ -15,7 +15,7 @@
"""CosineSimilarity."""
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class CosineSimilarity(Metric):
@@ -61,6 +61,7 @@ class CosineSimilarity(Metric):
self.sqr_mtx_res = 0
self._is_update = False

@rearrange_inputs
def update(self, inputs):
"""
Updates the internal evaluation result with 'input1'.


+ 2
- 1
mindspore/nn/metrics/dice.py View File

@@ -15,7 +15,7 @@
"""Dice"""
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class Dice(Metric):
@@ -59,6 +59,7 @@ class Dice(Metric):
self._dice_coeff_sum = 0
self._samples_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result :math:`y_pred` and :math:`y`.


+ 3
- 1
mindspore/nn/metrics/error.py View File

@@ -14,7 +14,7 @@
# ============================================================================
"""Error."""
import numpy as np
from .metric import Metric
from .metric import Metric, rearrange_inputs


class MAE(Metric):
@@ -51,6 +51,7 @@ class MAE(Metric):
self._abs_error_sum = 0
self._samples_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result :math:`y_{pred}` and :math:`y`.
@@ -114,6 +115,7 @@ class MSE(Metric):
self._squared_error_sum = 0
self._samples_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result :math:`y_{pred}` and :math:`y`.


+ 3
- 2
mindspore/nn/metrics/fbeta.py View File

@@ -16,14 +16,14 @@
import sys
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class Fbeta(Metric):
r"""
Calculates the fbeta score.

Fbeta score is a weighted mean of precison and recall.
Fbeta score is a weighted mean of precision and recall.

.. math::
F_\beta=\frac{(1+\beta^2) \cdot true\_positive}
@@ -57,6 +57,7 @@ class Fbeta(Metric):
self._positives = 0
self._class_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result `y_pred` and `y`.


+ 2
- 1
mindspore/nn/metrics/hausdorff_distance.py View File

@@ -20,7 +20,7 @@ from scipy.ndimage import morphology
import numpy as np
from mindspore.common.tensor import Tensor
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class _ROISpatialData(metaclass=ABCMeta):
@@ -249,6 +249,7 @@ class HausdorffDistance(Metric):
self.y_edges = 0
self._is_update = False

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result 'y_pred', 'y' and 'label_idx'.


+ 2
- 1
mindspore/nn/metrics/loss.py View File

@@ -13,7 +13,7 @@
# limitations under the License.
# ============================================================================
"""Loss for evaluation"""
from .metric import Metric
from .metric import Metric, rearrange_inputs


class Loss(Metric):
@@ -40,6 +40,7 @@ class Loss(Metric):
self._sum_loss = 0
self._total_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result.


+ 2
- 1
mindspore/nn/metrics/mean_surface_distance.py View File

@@ -16,7 +16,7 @@
from scipy.ndimage import morphology
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class MeanSurfaceDistance(Metric):
@@ -84,6 +84,7 @@ class MeanSurfaceDistance(Metric):

return surface_distance

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result 'y_pred', 'y' and 'label_idx'.


+ 28
- 1
mindspore/nn/metrics/metric.py View File

@@ -14,12 +14,31 @@
# ============================================================================
"""Metric base class."""
from abc import ABCMeta, abstractmethod
import functools
import numpy as np
from mindspore.common.tensor import Tensor

_eval_types = {'classification', 'multilabel'}


def rearrange_inputs(func):
    """
    Decorator that reorders a method's positional inputs by ``self.indexes``.

    When the owning object has truthy ``indexes``, the wrapped call receives
    ``inputs[i]`` for each ``i`` in ``indexes``, in that order; otherwise the
    inputs are forwarded untouched.

    Args:
        func (Callable): The method to wrap; its positional arguments are
            rearranged before the underlying call.

    Returns:
        Callable, the wrapped method.
    """
    @functools.wraps(func)
    def wrapper(self, *inputs):
        selectors = self.indexes
        if selectors:
            # Select and reorder; this may also drop inputs not listed.
            inputs = tuple(inputs[idx] for idx in selectors)
        return func(self, *inputs)
    return wrapper


class Metric(metaclass=ABCMeta):
"""
Base class of metric.
@@ -29,7 +48,7 @@ class Metric(metaclass=ABCMeta):
For examples of subclasses, please refer to the definition of class `MAE`, 'Recall' etc.
"""
def __init__(self):
pass
self._indexes = None

def _convert_data(self, data):
"""
@@ -96,6 +115,14 @@ class Metric(metaclass=ABCMeta):

return fps, tps, preds[threshold_idxs]

@property
def indexes(self):
    """Return the input indexes set via `set_indexes`, or None when unset."""
    # getattr with a default guards subclasses that never ran Metric.__init__
    # (and therefore never created the _indexes attribute).
    return getattr(self, '_indexes', None)

def set_indexes(self, indexes):
    """
    Set the indexes used by `rearrange_inputs` to reorder `update` inputs.

    Args:
        indexes: The positions of the inputs to forward to `update`, in the
            desired order — presumably a list of ints; no validation is
            performed here (TODO confirm callers only pass valid positions).

    Returns:
        The metric itself, so the call can be chained fluently.
    """
    self._indexes = indexes
    return self

def __call__(self, *inputs):
"""
Evaluate input data once.


+ 2
- 1
mindspore/nn/metrics/occlusion_sensitivity.py View File

@@ -17,7 +17,7 @@ import numpy as np
from mindspore import nn
from mindspore.common.tensor import Tensor
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs

try:
from tqdm import trange
@@ -116,6 +116,7 @@ class OcclusionSensitivity(Metric):
return np.vstack(scores)
return np.vstack((sensitivity_im, scores))

@rearrange_inputs
def update(self, *inputs):
"""
Updates input, including `model`, `y_pred` and `label`.


+ 2
- 1
mindspore/nn/metrics/perplexity.py View File

@@ -16,7 +16,7 @@
import math
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class Perplexity(Metric):
@@ -59,6 +59,7 @@ class Perplexity(Metric):
self._sum_metric = 0.0
self._num_inst = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result: math:preds and :math:labels.


+ 2
- 1
mindspore/nn/metrics/precision.py View File

@@ -18,7 +18,7 @@ import sys
import numpy as np

from mindspore._checkparam import Validator as validator
from .metric import EvaluationBase
from .metric import EvaluationBase, rearrange_inputs


class Precision(EvaluationBase):
@@ -68,6 +68,7 @@ class Precision(EvaluationBase):
self._true_positives = 0
self._positives = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result with `y_pred` and `y`.


+ 2
- 1
mindspore/nn/metrics/recall.py View File

@@ -18,7 +18,7 @@ import sys
import numpy as np

from mindspore._checkparam import Validator as validator
from .metric import EvaluationBase
from .metric import EvaluationBase, rearrange_inputs


class Recall(EvaluationBase):
@@ -67,6 +67,7 @@ class Recall(EvaluationBase):
self._true_positives = 0
self._actual_positives = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result with `y_pred` and `y`.


+ 2
- 1
mindspore/nn/metrics/roc.py View File

@@ -15,7 +15,7 @@
"""ROC"""
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class ROC(Metric):
@@ -107,6 +107,7 @@ class ROC(Metric):

return y_pred, y, class_num, pos_label

@rearrange_inputs
def update(self, *inputs):
"""
Update state with predictions and targets.


+ 2
- 1
mindspore/nn/metrics/root_mean_square_surface_distance.py View File

@@ -16,7 +16,7 @@
from scipy.ndimage import morphology
import numpy as np
from mindspore._checkparam import Validator as validator
from .metric import Metric
from .metric import Metric, rearrange_inputs


class RootMeanSquareDistance(Metric):
@@ -86,6 +86,7 @@ class RootMeanSquareDistance(Metric):

return surface_distance

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result 'y_pred', 'y' and 'label_idx'.


+ 2
- 1
mindspore/nn/metrics/topk.py View File

@@ -14,7 +14,7 @@
# ============================================================================
"""Topk."""
import numpy as np
from .metric import Metric
from .metric import Metric, rearrange_inputs


class TopKCategoricalAccuracy(Metric):
@@ -57,6 +57,7 @@ class TopKCategoricalAccuracy(Metric):
self._correct_num = 0
self._samples_num = 0

@rearrange_inputs
def update(self, *inputs):
"""
Updates the internal evaluation result y_pred and y.


+ 12
- 0
tests/ut/python/metrics/test_accuracy.py View File

@@ -35,6 +35,18 @@ def test_classification_accuracy():
assert math.isclose(accuracy2, 2 / 3)


def test_classification_accuracy_indexes_awareness():
    """An indexes-aware version of test_classification_accuracy."""
    logits = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]]))
    labels_ignored = Tensor(np.array([1, 0, 1]))
    labels_used = Tensor(np.array([0, 0, 1]))
    # Indexes [0, 2] select (logits, labels_used) and skip labels_ignored.
    acc_metric = Accuracy('classification').set_indexes([0, 2])
    acc_metric.clear()
    acc_metric.update(logits, labels_ignored, labels_used)
    assert math.isclose(acc_metric.eval(), 1 / 3)


def test_multilabel_accuracy():
x = Tensor(np.array([[0, 1, 0, 1], [1, 0, 1, 1], [0, 0, 0, 1]]))
y = Tensor(np.array([[0, 1, 1, 1], [0, 1, 1, 1], [0, 0, 0, 1]]))


+ 36
- 0
tests/ut/python/metrics/test_metric_factory.py View File

@@ -18,6 +18,7 @@ import numpy as np

from mindspore import Tensor
from mindspore.nn.metrics import get_metric_fn
from mindspore.nn.metrics.metric import rearrange_inputs


def test_classification_accuracy():
@@ -49,3 +50,38 @@ def test_classification_precision():
precision = metric.eval()

assert np.equal(precision, np.array([0.5, 1])).all()


class RearrangeInputsDemo:
    """Tiny fixture exposing the `indexes` protocol that `rearrange_inputs` expects."""

    def __init__(self):
        self._indexes = None  # no rearrangement configured yet

    @property
    def indexes(self):
        """The configured input order; None means inputs pass through untouched."""
        return getattr(self, '_indexes', None)

    def set_indexes(self, indexes):
        """Store the desired input order; returns self so calls can be chained."""
        self._indexes = indexes
        return self

    @rearrange_inputs
    def update(self, *inputs):
        """Echo back whatever inputs arrive after the decorator has reordered them."""
        return inputs


def test_rearrange_inputs_without_arrange():
    """With no indexes set, update must see the inputs in their original order."""
    demo = RearrangeInputsDemo()
    assert demo.update(5, 9) == (5, 9)


def test_rearrange_inputs_with_arrange():
    """Indexes [1, 0] must swap the two inputs before update sees them."""
    demo = RearrangeInputsDemo().set_indexes([1, 0])
    assert demo.update(5, 9) == (9, 5)


def test_rearrange_inputs_with_multi_inputs():
    """Indexes may also select a strict subset of a longer input tuple."""
    demo = RearrangeInputsDemo().set_indexes([1, 3])
    assert demo.update(0, 9, 0, 5) == (9, 5)

+ 13
- 0
tests/ut/python/metrics/test_occlusion_sensitivity.py View File

@@ -46,6 +46,19 @@ def test_occlusion_sensitivity():
assert np.allclose(score, np.array([0.2, 0.2, 0.2, 0.2]))


def test_occlusion_sensitivity_indexes_awareness():
    """An indexes-aware version of test_occlusion_sensitivity."""
    decoy_data = np.array([[0.1, 0.2, 0.3, 0.4]]).astype(np.float32)
    used_data = np.array([[0.2, 0.3, 0.1, 0.4]]).astype(np.float32)
    label = np.array(1).astype(np.int32)
    # Indexes [0, 2, 3] select (model, used_data, label) and drop decoy_data.
    metric = OcclusionSensitivity().set_indexes([0, 2, 3])
    metric.clear()
    metric.update(model, decoy_data, used_data, label)
    score = metric.eval()

    assert np.allclose(score, np.array([0.3, 0.3, 0.3, 0.3]))


def test_occlusion_sensitivity_update1():
"""test_occlusion_sensitivity_update1"""
test_data = np.array([[5, 8], [3, 2], [4, 2]])


+ 13
- 0
tests/ut/python/metrics/test_root_mean_square_distance.py View File

@@ -33,6 +33,19 @@ def test_root_mean_square_distance():
assert math.isclose(distance, 1.0000000000000002, abs_tol=0.001)


def test_root_mean_square_distance_indexes_awareness():
    """An indexes-aware version of test_root_mean_square_distance."""
    y_pred = Tensor(np.array([[3, 0, 1], [1, 3, 0], [1, 0, 2]]))
    y_unused = Tensor(np.array([[0, 2, 1], [1, 2, 1], [0, 0, 1]]))
    y_target = Tensor(np.array([[0, 0, 1], [0, 2, 1], [2, 0, 1]]))
    # Indexes [0, 2, 3] select (y_pred, y_target, label_idx) and skip y_unused.
    metric = get_metric_fn('root_mean_square_distance').set_indexes([0, 2, 3])
    metric.clear()
    metric.update(y_pred, y_unused, y_target, 0)

    assert math.isclose(metric.eval(), 0.6666666666666669, abs_tol=0.001)


def test_root_mean_square_distance_update1():
x = Tensor(np.array([[0.2, 0.5, 0.7], [0.3, 0.1, 0.2], [0.9, 0.6, 0.5]]))
metric = RootMeanSquareDistance()


Loading…
Cancel
Save