
Fix bug: modify GuidedBackprop API to make example executable

tags/v1.1.0
lixiaohui · 5 years ago · commit a8eca53596
7 changed files with 38 additions and 15 deletions
  1. +3 -3   mindspore/explainer/_runner.py
  2. +4 -2   mindspore/explainer/benchmark/_attribution/faithfulness.py
  3. +4 -1   mindspore/explainer/benchmark/_attribution/localization.py
  4. +4 -1   mindspore/explainer/explanation/_attribution/_backprop/gradcam.py
  5. +5 -1   mindspore/explainer/explanation/_attribution/_backprop/gradient.py
  6. +11 -5  mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py
  7. +7 -2   mindspore/explainer/explanation/_attribution/_perturbation/rise.py

+3 -3  mindspore/explainer/_runner.py

@@ -161,7 +161,7 @@ class ExplainRunner:
Examples:
>>> from mindspore.explainer import ExplainRunner
>>> from mindspore.explainer.explanation import GuidedBackprop, Gradient
>>> from mindspore.nn import Sigmoid
>>> from mindspore.nn import Softmax
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> # Prepare the dataset for explaining and evaluation, e.g., Cifar10
>>> dataset = get_dataset('/path/to/Cifar10_dataset')
@@ -172,9 +172,9 @@ class ExplainRunner:
>>> load_param_into_net(net, param_dict)
>>> gbp = GuidedBackprop(net)
>>> gradient = Gradient(net)
>>> runner = ExplainRunner("./")
>>> explainers = [gbp, gradient]
>>> runner.run((dataset, classes), explainers, activation_fn=Sigmoid())
>>> # runner is an ExplainRunner object
>>> runner.run((dataset, classes), explainers, activation_fn=Softmax())
"""

check_value_type("dataset", dataset, tuple)
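
Read together, the two hunks above swap Sigmoid for Softmax in the ExplainRunner example. A rough end-to-end version of the corrected example is sketched below; resnet50, get_dataset, the checkpoint path and the classes list are placeholders for a model_zoo network and a Cifar10 pipeline, not part of the diff.

    # Sketch of the corrected ExplainRunner usage (placeholders noted above).
    from mindspore.explainer import ExplainRunner
    from mindspore.explainer.explanation import GuidedBackprop, Gradient
    from mindspore.nn import Softmax
    from mindspore.train.serialization import load_checkpoint, load_param_into_net

    net = resnet50(10)                                 # placeholder: model from model_zoo
    dataset = get_dataset('/path/to/Cifar10_dataset')  # placeholder: dataset pipeline
    classes = ['airplane', 'automobile', ...]          # label names of the dataset

    param_dict = load_checkpoint("resnet50.ckpt")
    load_param_into_net(net, param_dict)

    gbp = GuidedBackprop(net)
    gradient = Gradient(net)
    runner = ExplainRunner("./")                       # output directory for summary files
    explainers = [gbp, gradient]
    # the fixed example passes Softmax (not Sigmoid) as the output activation
    runner.run((dataset, classes), explainers, activation_fn=Softmax())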


+4 -2  mindspore/explainer/benchmark/_attribution/faithfulness.py

@@ -414,8 +414,10 @@ class Faithfulness(LabelSensitiveMetric):
numpy.ndarray, 1D array of shape :math:`(N,)`, result of faithfulness evaluated on `explainer`.

Examples:
>>> # init an explainer, the network should contain the output activation function.
>>> network = resnet50(20)
>>> import numpy as np
>>> import mindspore as ms
>>> from mindspore.explainer.explanation import Gradient
>>> # init an explainer with a trained network, e.g., resnet50
>>> gradient = Gradient(network)
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
>>> targets = 5
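
The hunk ends before the evaluation call itself; a hedged sketch of how the evaluation would proceed is below. The Faithfulness(num_labels, activation_fn, metric) constructor and the "NaiveFaithfulness" metric name are assumptions drawn from the module's class-level documentation rather than from this hunk, and resnet50 is a placeholder for a trained 20-class network.

    # Sketch: evaluating a Gradient explainer with Faithfulness (constructor
    # arguments are assumed, see the note above).
    import numpy as np
    import mindspore as ms
    from mindspore.nn import Softmax
    from mindspore.explainer.benchmark import Faithfulness
    from mindspore.explainer.explanation import Gradient

    network = resnet50(20)            # placeholder: a trained 20-class network
    gradient = Gradient(network)
    inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
    targets = 5

    faithfulness = Faithfulness(num_labels=20, activation_fn=Softmax(),
                                metric="NaiveFaithfulness")
    # 1D numpy.ndarray of shape (N,): one faithfulness score per input sample
    res = faithfulness.evaluate(gradient, inputs, targets)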


+4 -1  mindspore/explainer/benchmark/_attribution/localization.py

@@ -110,7 +110,10 @@ class Localization(LabelSensitiveMetric):
numpy.ndarray, 1D array of shape :math:`(N,)`, result of localization evaluated on `explainer`.

Examples:
>>> # init an explainer with a trained network
>>> import numpy as np
>>> import mindspore as ms
>>> from mindspore.explainer.explanation import Gradient
>>> # init an explainer with a trained network, e.g., resnet50
>>> gradient = Gradient(network)
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
>>> masks = np.zeros([1, 1, 224, 224])
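
As with Faithfulness above, the hunk stops before the call to evaluate. A sketch of the remaining steps is below; the Localization(num_labels, metric) constructor, the "PointingGame" metric name and the mask keyword are assumptions based on the module's documentation, and resnet50 plus the marked mask region are placeholders.

    # Sketch: scoring a Gradient explainer with Localization against a
    # ground-truth mask (constructor and keyword names assumed, see above).
    import numpy as np
    import mindspore as ms
    from mindspore.explainer.benchmark import Localization
    from mindspore.explainer.explanation import Gradient

    network = resnet50(10)                   # placeholder: a trained network
    gradient = Gradient(network)
    inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
    masks = np.zeros([1, 1, 224, 224])
    masks[:, :, 65:100, 65:100] = 1          # placeholder object region
    targets = 5

    localization = Localization(num_labels=10, metric="PointingGame")
    res = localization.evaluate(gradient, inputs, targets, mask=masks)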


+4 -1  mindspore/explainer/explanation/_attribution/_backprop/gradcam.py

@@ -66,7 +66,8 @@ class GradCAM(IntermediateLayerAttribution):

Examples:
>>> from mindspore.explainer.explanation import GradCAM
>>> net = resnet50(10)
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> network = resnet50(10) # please refer to model_zoo
>>> param_dict = load_checkpoint("resnet50.ckpt")
>>> load_param_into_net(net, param_dict)
>>> # specify a layer name to generate explanation, usually the layer can be set as the last conv layer.
@@ -111,6 +112,8 @@ class GradCAM(IntermediateLayerAttribution):
Tensor, a 4D tensor of shape :math:`(N, 1, H, W)`.

Examples:
>>> import mindspore as ms
>>> import numpy as np
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
>>> label = 5
>>> # gradcam is a GradCAM object, parse data and the target label to be explained and get the attribution
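
Note that the first hunk renames net to network while the retained load_param_into_net(net, param_dict) line still says net; the sketch below uses one name throughout and completes the truncated call example. The layer name 'layer4' is a placeholder for the network's last convolutional layer.

    # Sketch: full GradCAM usage with a consistent variable name.
    import numpy as np
    import mindspore as ms
    from mindspore.explainer.explanation import GradCAM
    from mindspore.train.serialization import load_checkpoint, load_param_into_net

    network = resnet50(10)                        # placeholder from model_zoo
    param_dict = load_checkpoint("resnet50.ckpt")
    load_param_into_net(network, param_dict)

    # usually the layer is set to the last convolutional layer
    gradcam = GradCAM(network, layer='layer4')    # 'layer4' is a placeholder name
    inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
    label = 5
    saliency = gradcam(inputs, label)             # 4D Tensor of shape (N, 1, H, W)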


+5 -1  mindspore/explainer/explanation/_attribution/_backprop/gradient.py

@@ -61,7 +61,9 @@ class Gradient(Attribution):

Examples:
>>> from mindspore.explainer.explanation import Gradient
>>> net = resnet50(10)
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> # init Gradient with a trained network
>>> net = resnet50(10) # please refer to model_zoo
>>> param_dict = load_checkpoint("resnet50.ckpt")
>>> load_param_into_net(net, param_dict)
>>> gradient = Gradient(net)
@@ -89,6 +91,8 @@ class Gradient(Attribution):
Tensor, a 4D tensor of shape :math:`(N, 1, H, W)`.

Examples:
>>> import mindspore as ms
>>> import numpy as np
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
>>> label = 5
>>> # gradient is a Gradient object, parse data and the target label to be explained and get the attribution
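
For completeness, the call that the truncated comment above leads into would look roughly as follows; resnet50 and the checkpoint path are the same placeholders used in the hunk.

    # Sketch: completing the Gradient call example.
    import numpy as np
    import mindspore as ms
    from mindspore.explainer.explanation import Gradient
    from mindspore.train.serialization import load_checkpoint, load_param_into_net

    net = resnet50(10)                        # placeholder from model_zoo
    param_dict = load_checkpoint("resnet50.ckpt")
    load_param_into_net(net, param_dict)
    gradient = Gradient(net)

    inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
    label = 5
    saliency = gradient(inputs, label)        # 4D Tensor of shape (N, 1, H, W)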


+11 -5  mindspore/explainer/explanation/_attribution/_backprop/modified_relu.py

@@ -97,11 +97,14 @@ class Deconvolution(ModifiedReLU):
network (Cell): The black-box model to be explained.

Examples:
>>> import numpy as np
>>> import mindspore as ms
>>> from mindspore.explainer.explanation import Deconvolution
>>> net = resnet50(10)
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> net = resnet50(10) # please refer to model_zoo
>>> param_dict = load_checkpoint("resnet50.ckpt")
>>> load_param_into_net(net, param_dict)
>>> # init Gradient with a trained network.
>>> # init Deconvolution with a trained network.
>>> deconvolution = Deconvolution(net)
>>> # parse data and the target label to be explained and get the saliency map
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
@@ -132,14 +135,17 @@ class GuidedBackprop(ModifiedReLU):
network (Cell): The black-box model to be explained.

Examples:
>>> import numpy as np
>>> import mindspore as ms
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> from mindspore.explainer.explanation import GuidedBackprop
>>> net = resnet50(10)
>>> net = resnet50(10) # please refer to model_zoo
>>> param_dict = load_checkpoint("resnet50.ckpt")
>>> load_param_into_net(net, param_dict)
>>> # init Gradient with a trained network.
>>> # init GuidedBackprop with a trained network.
>>> gbp = GuidedBackprop(net)
>>> # parse data and the target label to be explained and get the saliency map
>>> inputs = ms.Tensor(np.random.rand([1, 3, 224, 224]), ms.float32)
>>> inputs = ms.Tensor(np.random.rand(1, 3, 224, 224), ms.float32)
>>> label = 5
>>> saliency = gbp(inputs, label)
"""


+7 -2  mindspore/explainer/explanation/_attribution/_perturbation/rise.py

@@ -52,11 +52,14 @@ class RISE(PerturbationAttribution):

Examples:
>>> from mindspore.explainer.explanation import RISE
>>> net = resnet50(10)
>>> from mindspore.nn import Sigmoid
>>> from mindspore.train.serialization import load_checkpoint, load_param_into_net
>>> # init RISE with a trained network
>>> net = resnet50(10) # please refer to model_zoo
>>> param_dict = load_checkpoint("resnet50.ckpt")
>>> load_param_into_net(net, param_dict)
>>> # init RISE with specified activation function
>>> rise = RISE(net, activation_fn=nn.layer.Sigmoid())
>>> rise = RISE(net, activation_fn=Sigmoid())
"""

def __init__(self,
@@ -113,6 +116,8 @@ class RISE(PerturbationAttribution):
Tensor, a 4D tensor of shape :math:`(N, ?, H, W)` or :math:`(N, 1, H, W)`.

Examples:
>>> import mindspore as ms
>>> import numpy as np
>>> # given an instance of RISE, a saliency map can be generated
>>> inputs = ms.Tensor(np.random.rand(2, 3, 224, 224), ms.float32)
>>> # when `targets` is an integer
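
The example is cut off right after the integer-target case. A sketch of both call patterns is below; the 2D per-sample target Tensor is an assumption inferred from the (N, ?, H, W) return shape mentioned above, and resnet50 is a placeholder.

    # Sketch: RISE saliency for integer and Tensor targets
    # (the Tensor-target pattern is an assumption, see above).
    import numpy as np
    import mindspore as ms
    from mindspore.nn import Sigmoid
    from mindspore.explainer.explanation import RISE

    net = resnet50(10)                         # placeholder: a trained network
    rise = RISE(net, activation_fn=Sigmoid())

    inputs = ms.Tensor(np.random.rand(2, 3, 224, 224), ms.float32)
    # when `targets` is an integer, one map is produced per sample
    saliency = rise(inputs, 5)                 # shape (2, 1, 224, 224)
    # when `targets` lists labels per sample, one map is produced per label
    targets = ms.Tensor([[5], [1]], ms.int32)
    saliency = rise(inputs, targets)           # shape (2, 1, 224, 224) here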

