From d8a4827f80b8e2af094a5410a0cfa00b6df2ca01 Mon Sep 17 00:00:00 2001 From: bingyaweng Date: Wed, 19 Aug 2020 15:00:25 +0800 Subject: [PATCH] add ut, st --- .../bnn_layers/conv_variational.py | 28 +++++++-------- .../bnn_layers/dense_variational.py | 22 +++++------- .../bnn_layers/layer_distribution.py | 36 +++++++++++-------- 3 files changed, 43 insertions(+), 43 deletions(-) diff --git a/mindspore/nn/probability/bnn_layers/conv_variational.py b/mindspore/nn/probability/bnn_layers/conv_variational.py index 4434fee9e0..cdd79a7cdf 100644 --- a/mindspore/nn/probability/bnn_layers/conv_variational.py +++ b/mindspore/nn/probability/bnn_layers/conv_variational.py @@ -61,6 +61,12 @@ class _ConvVariational(_Conv): raise ValueError('Attr \'pad_mode\' of \'Conv2d\' Op passed ' + str(pad_mode) + ', should be one of values in \'valid\', \'same\', \'pad\'.') + if not isinstance(stride, (int, tuple)): + raise TypeError('The type of `stride` should be `int` or `tuple`') + + if not isinstance(dilation, (int, tuple)): + raise TypeError('The type of `dilation` should be `int` or `tuple`') + # convolution args self.in_channels = in_channels self.out_channels = out_channels @@ -87,13 +93,10 @@ class _ConvVariational(_Conv): raise TypeError('The type of `weight_prior_fn` should be `NormalPrior`') self.weight_prior = weight_prior_fn() - if isinstance(weight_posterior_fn, Cell): - if weight_posterior_fn.__class__.__name__ != 'NormalPosterior': - raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') - else: - if weight_posterior_fn.__name__ != 'NormalPosterior': - raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') - self.weight_posterior = weight_posterior_fn(shape=self.shape, name='bnn_weight') + try: + self.weight_posterior = weight_posterior_fn(shape=self.shape, name='bnn_weight') + except TypeError: + raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') if self.has_bias: self.bias.requires_grad = 
False @@ -107,13 +110,10 @@ class _ConvVariational(_Conv): raise TypeError('The type of `bias_prior_fn` should be `NormalPrior`') self.bias_prior = bias_prior_fn() - if isinstance(bias_posterior_fn, Cell): - if bias_posterior_fn.__class__.__name__ != 'NormalPosterior': - raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') - else: - if bias_posterior_fn.__name__ != 'NormalPosterior': - raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') - self.bias_posterior = bias_posterior_fn(shape=[self.out_channels], name='bnn_bias') + try: + self.bias_posterior = bias_posterior_fn(shape=[self.out_channels], name='bnn_bias') + except TypeError: + raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') # mindspore operations self.bias_add = P.BiasAdd() diff --git a/mindspore/nn/probability/bnn_layers/dense_variational.py b/mindspore/nn/probability/bnn_layers/dense_variational.py index 4e28c125a8..81aa5abccb 100644 --- a/mindspore/nn/probability/bnn_layers/dense_variational.py +++ b/mindspore/nn/probability/bnn_layers/dense_variational.py @@ -51,13 +51,10 @@ class _DenseVariational(Cell): raise TypeError('The type of `weight_prior_fn` should be `NormalPrior`') self.weight_prior = weight_prior_fn() - if isinstance(weight_posterior_fn, Cell): - if weight_posterior_fn.__class__.__name__ != 'NormalPosterior': - raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') - else: - if weight_posterior_fn.__name__ != 'NormalPosterior': - raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') - self.weight_posterior = weight_posterior_fn(shape=[self.out_channels, self.in_channels], name='bnn_weight') + try: + self.weight_posterior = weight_posterior_fn(shape=[self.out_channels, self.in_channels], name='bnn_weight') + except TypeError: + raise TypeError('The type of `weight_posterior_fn` should be `NormalPosterior`') if self.has_bias: if isinstance(bias_prior_fn, 
Cell): @@ -69,13 +66,10 @@ class _DenseVariational(Cell): raise TypeError('The type of `bias_prior_fn` should be `NormalPrior`') self.bias_prior = bias_prior_fn() - if isinstance(bias_posterior_fn, Cell): - if bias_posterior_fn.__class__.__name__ != 'NormalPosterior': - raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') - else: - if bias_posterior_fn.__name__ != 'NormalPosterior': - raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') - self.bias_posterior = bias_posterior_fn(shape=[self.out_channels], name='bnn_bias') + try: + self.bias_posterior = bias_posterior_fn(shape=[self.out_channels], name='bnn_bias') + except TypeError: + raise TypeError('The type of `bias_posterior_fn` should be `NormalPosterior`') self.activation = activation if isinstance(self.activation, str): diff --git a/mindspore/nn/probability/bnn_layers/layer_distribution.py b/mindspore/nn/probability/bnn_layers/layer_distribution.py index f36e1dbbe5..02d84883ee 100644 --- a/mindspore/nn/probability/bnn_layers/layer_distribution.py +++ b/mindspore/nn/probability/bnn_layers/layer_distribution.py @@ -51,15 +51,16 @@ class NormalPosterior(Cell): Args: name (str): Name prepended to trainable parameter. - shape (list): Shape of the mean and standard deviation. + shape (list, tuple): Shape of the mean and standard deviation. dtype (class `mindspore.dtype`): The argument is used to define the data type of the output tensor. Default: mindspore.float32. - loc_mean ( float, array_like of floats): Mean of distribution to initialize trainable parameters. Default: 0. - loc_std ( float, array_like of floats): Standard deviation of distribution to initialize trainable parameters. - Default: 0.1. - untransformed_scale_mean ( float, array_like of floats): Mean of distribution to initialize trainable + loc_mean (int, float, array_like of floats): Mean of distribution to initialize trainable parameters. + Default: 0. 
+ loc_std (int, float, array_like of floats): Standard deviation of distribution to initialize trainable + parameters. Default: 0.1. + untransformed_scale_mean (int, float, array_like of floats): Mean of distribution to initialize trainable parameters. Default: -5. - untransformed_scale_std ( float, array_like of floats): Standard deviation of distribution to initialize + untransformed_scale_std (int, float, array_like of floats): Standard deviation of distribution to initialize trainable parameters. Default: 0.1. Returns: @@ -80,20 +81,25 @@ class NormalPosterior(Cell): if not isinstance(shape, (tuple, list)): raise TypeError('The type of `shape` should be `tuple` or `list`') - if not (np.array(shape) > 0).all(): - raise ValueError('Negative dimensions are not allowed') + try: + mean_arr = np.random.normal(loc_mean, loc_std, shape) + except ValueError as msg: + raise ValueError(msg) + except TypeError as msg: + raise TypeError(msg) - if not (np.array(loc_std) >= 0).all(): - raise ValueError('The value of `loc_std` < 0') - if not (np.array(untransformed_scale_std) >= 0).all(): - raise ValueError('The value of `untransformed_scale_std` < 0') + try: + untransformed_scale_arr = np.random.normal(untransformed_scale_mean, untransformed_scale_std, shape) + except ValueError as msg: + raise ValueError(msg) + except TypeError as msg: + raise TypeError(msg) self.mean = Parameter( - Tensor(np.random.normal(loc_mean, loc_std, shape), dtype=dtype), name=name + '_mean') + Tensor(mean_arr, dtype=dtype), name=name + '_mean') self.untransformed_std = Parameter( - Tensor(np.random.normal(untransformed_scale_mean, untransformed_scale_std, shape), dtype=dtype), - name=name + '_untransformed_std') + Tensor(untransformed_scale_arr, dtype=dtype), name=name + '_untransformed_std') self.normal = Normal()