def batch_log_pdf(self, x):
"""
Evaluates log probability density over one or a batch of samples.
Each of alpha and x can be either a single value or a batch of values batched along dimension 0.
If they are both batches, their batch sizes must agree.
In any case, the rightmost size must agree.
:param torch.autograd.Variable x: A value (if x.dim() == 1) or or batch of values (if x.dim() == 2).
:param alpha: A vector of concentration parameters.
:type alpha: torch.autograd.Variable or None.
:return: log probability densities of each element in the batch.
:rtype: torch.autograd.Variable of torch.Tensor of dimension 1.
"""
    # Broadcast the concentration parameter to the shape of x.
    alpha = self.alpha.expand(self.shape(x))
    # Log density: sum((alpha - 1) * log(x)) - log B(alpha), summed over the
    # rightmost (event) dimension.
    x_sum = torch.sum(torch.mul(alpha - 1, torch.log(x)), -1)
    beta = log_beta(alpha)
    # Reshape to the batch shape with a trailing singleton dimension.
    batch_log_pdf_shape = self.batch_shape(x) + (1,)
    return (x_sum - beta).contiguous().view(batch_log_pdf_shape)
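
The quantity computed above appears to be the Dirichlet log density, sum((alpha - 1) * log(x)) - log B(alpha). Below is a minimal standalone sketch, not the original class, that recomputes it on plain tensors and checks it against torch.distributions.Dirichlet; the helper name dirichlet_batch_log_pdf is made up here, and the sketch assumes PyTorch >= 0.4, where Variable is merged into Tensor.

import torch
from torch.distributions import Dirichlet

# Hypothetical helper (not the original method): Dirichlet log density on plain
# tensors, assuming alpha broadcasts against the rightmost dimension of x.
def dirichlet_batch_log_pdf(x, alpha):
    alpha = alpha.expand_as(x)
    x_sum = torch.sum((alpha - 1) * torch.log(x), -1)
    # log B(alpha) = sum_i lgamma(alpha_i) - lgamma(sum_i alpha_i)
    log_beta = torch.sum(torch.lgamma(alpha), -1) - torch.lgamma(torch.sum(alpha, -1))
    return x_sum - log_beta

alpha = torch.tensor([2.0, 3.0, 4.0])
x = torch.tensor([[0.2, 0.3, 0.5],
                  [0.1, 0.6, 0.3]])  # a batch of two points on the simplex
print(dirichlet_batch_log_pdf(x, alpha))
print(Dirichlet(alpha).log_prob(x))  # should agree with the line above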