diff --git a/mindspore/nn/probability/dpn/vae/cvae.py b/mindspore/nn/probability/dpn/vae/cvae.py
index 96a7d7b20ea..80e260a6ba2 100644
--- a/mindspore/nn/probability/dpn/vae/cvae.py
+++ b/mindspore/nn/probability/dpn/vae/cvae.py
@@ -22,15 +22,16 @@ from ....layer.basic import Dense, OneHot
 
 
 class ConditionalVAE(Cell):
     r"""
-    Conditional Variational auto-encoder (CVAE).
+    Conditional Variational Auto-Encoder (CVAE).
 
     The difference with VAE is that CVAE uses labels information.
-    see more details in ``.
+    see more details in `Learning Structured Output Representation using Deep Conditional Generative Models
+    <http://papers.nips.cc/paper/5775-learning-structured-output-representation-using-deep-conditional-generative-models>`_.
 
     Note:
         When define the encoder and decoder, the shape of the encoder's output tensor and decoder's input tensor
-        should be math:`(N, hidden_size)`.
+        should be :math:`(N, hidden_size)`.
         The latent_size should be less than or equal to the hidden_size.
 
     Args:
@@ -42,7 +43,7 @@ class ConditionalVAE(Cell):
 
     Inputs:
         - **input_x** (Tensor) - the same shape as the input of encoder.
-        - **input_y** (Tensor) - the tensor of the target data, the shape is math:`(N, 1)`.
+        - **input_y** (Tensor) - the tensor of the target data, the shape is :math:`(N, 1)`.
 
     Outputs:
         - **output** (tuple) - (recon_x(Tensor), x(Tensor), mu(Tensor), std(Tensor)).
@@ -100,13 +101,13 @@ class ConditionalVAE(Cell):
         Args:
             sample_y (Tensor): Define the label of sample, int tensor.
             generate_nums (int): The number of samples to generate.
-            shape(tuple): The shape of sample, it should be math:`(generate_nums, C, H, W)` or math:`(-1, C, H, W)`.
+            shape(tuple): The shape of sample, it should be (generate_nums, C, H, W) or (-1, C, H, W).
 
         Returns:
             Tensor, the generated sample.
         """
         generate_nums = check_int_positive(generate_nums)
-        if not isinstance(shape, tuple) or len(shape) != 4 or shape[0] != generate_nums or shape[0] != -1:
+        if not isinstance(shape, tuple) or len(shape) != 4 or (shape[0] != -1 and shape[0] != generate_nums):
             raise ValueError('The shape should be (generate_nums, C, H, W) or (-1, C, H, W).')
         sample_z = self.normal((generate_nums, self.latent_size), self.to_tensor(0.0), self.to_tensor(1.0), seed=0)
         sample_y = self.one_hot(sample_y)
diff --git a/mindspore/nn/probability/dpn/vae/vae.py b/mindspore/nn/probability/dpn/vae/vae.py
index 731a7608621..a6be6ddd8b4 100644
--- a/mindspore/nn/probability/dpn/vae/vae.py
+++ b/mindspore/nn/probability/dpn/vae/vae.py
@@ -22,14 +22,14 @@ from ....layer.basic import Dense
 
 class VAE(Cell):
     r"""
-    Variational auto-encoder (VAE).
+    Variational Auto-Encoder (VAE).
 
     The VAE defines a generative model, `Z` is sampled from the prior, then used to reconstruct `X` by a decoder.
-    see more details in `Auto-Encoding Variational Bayes`_.
+    see more details in `Auto-Encoding Variational Bayes <https://arxiv.org/abs/1312.6114>`_.
 
     Note:
         When define the encoder and decoder, the shape of the encoder's output tensor and decoder's input tensor
-        should be math:`(N, hidden_size)`.
+        should be :math:`(N, hidden_size)`.
         The latent_size should be less than or equal to the hidden_size.
 
     Args:
@@ -88,13 +88,13 @@ class VAE(Cell):
 
         Args:
             generate_nums (int): The number of samples to generate.
-            shape(tuple): The shape of sample, it should be math:`(generate_nums, C, H, W)` or math:`(-1, C, H, W)`.
+            shape(tuple): The shape of sample, it should be (generate_nums, C, H, W) or (-1, C, H, W).
 
         Returns:
             Tensor, the generated sample.
         """
         generate_nums = check_int_positive(generate_nums)
-        if not isinstance(shape, tuple) or len(shape) != 4 or shape[0] != generate_nums or shape[0] != -1:
+        if not isinstance(shape, tuple) or len(shape) != 4 or (shape[0] != -1 and shape[0] != generate_nums):
             raise ValueError('The shape should be (generate_nums, C, H, W) or (-1, C, H, W).')
         sample_z = self.normal((generate_nums, self.latent_size), self.to_tensor(0.0), self.to_tensor(1.0), seed=0)
         sample = self._decode(sample_z)
diff --git a/mindspore/nn/probability/infer/variational/elbo.py b/mindspore/nn/probability/infer/variational/elbo.py
index 9eb573ddea1..7d431e37f1f 100644
--- a/mindspore/nn/probability/infer/variational/elbo.py
+++ b/mindspore/nn/probability/infer/variational/elbo.py
@@ -27,7 +27,7 @@ class ELBO(Cell):
     the posterior distribution. It maximizes the evidence lower bound (ELBO), a lower bound on the logarithm of
     the marginal probability of the observations log p(x). The ELBO is equal to the negative KL divergence up to
     an additive constant.
-    see more details in `Variational Inference: A Review for Statisticians`_.
+    see more details in `Variational Inference: A Review for Statisticians <https://arxiv.org/abs/1601.00670>`_.
 
     Args:
         latent_prior(str): The prior distribution of latent space. Default: Normal.
diff --git a/mindspore/nn/probability/infer/variational/svi.py b/mindspore/nn/probability/infer/variational/svi.py
index f9c1b96f213..2613275ffe8 100644
--- a/mindspore/nn/probability/infer/variational/svi.py
+++ b/mindspore/nn/probability/infer/variational/svi.py
@@ -28,7 +28,7 @@ class SVI:
     Variational inference casts the inference problem as an optimization. Some distributions over the hidden
     variables that is indexed by a set of free parameters, and then optimize the parameters to make it closest to
     the posterior of interest.
-    see more details in `Variational Inference: A Review for Statisticians`_.
+    see more details in `Variational Inference: A Review for Statisticians <https://arxiv.org/abs/1601.00670>`_.
 
     Args:
         net_with_loss(Cell): Cell with loss function.
diff --git a/mindspore/nn/probability/toolbox/uncertainty_evaluation.py b/mindspore/nn/probability/toolbox/uncertainty_evaluation.py
index f8a12f07abd..665ffcca10e 100644
--- a/mindspore/nn/probability/toolbox/uncertainty_evaluation.py
+++ b/mindspore/nn/probability/toolbox/uncertainty_evaluation.py
@@ -219,7 +219,7 @@ class EpistemicUncertaintyModel(Cell):
     after Dense layer or Conv layer, then use dropout during train and eval time.
 
     See more details in `Dropout as a Bayesian Approximation: Representing Model uncertainty in Deep Learning
-    <https://arxiv.org/abs/1506.02142>`.
+    <https://arxiv.org/abs/1506.02142>`_.
     """
 
     def __init__(self, epi_model):
@@ -257,7 +257,7 @@ class AleatoricUncertaintyModel(Cell):
     uncertainty, the loss function should be modified in order to add variance into loss.
 
     See more details in `What Uncertainties Do We Need in Bayesian Deep Learning for Computer Vision?
-    <https://arxiv.org/abs/1703.04977>`.
+    <https://arxiv.org/abs/1703.04977>`_.
     """
 
     def __init__(self, ale_model, num_classes, task):