def entropy(self, alpha=0.05, sem_tol=1e-3, mc_size=1000):
    '''
    Monte Carlo estimate of the entropy of the mixed vine.

    Samples are drawn in batches of ``mc_size`` until the standard error
    of the running entropy estimate falls below ``sem_tol``.

    Parameters
    ----------
    alpha : float, optional
        Significance level of the entropy estimate. (Default: 0.05)
    sem_tol : float, optional
        Maximum standard error as a stopping criterion. (Default: 1e-3)
    mc_size : integer, optional
        Number of samples that are drawn in each iteration of the Monte
        Carlo estimation. (Default: 1000)

    Returns
    -------
    ent : float
        Estimate of the mixed vine entropy in bits.
    sem : float
        Standard error of the mixed vine entropy estimate in bits.
    '''
    # Gaussian critical value matching sem_tol at significance level alpha
    z_crit = norm.ppf(1 - alpha)
    ent = 0.0
    sem = np.inf
    sq_dev_sum = 0.0   # running sum of squared deviations from the mean
    iteration = 0
    while sem >= sem_tol:
        # Draw a fresh batch and evaluate its log-density
        draws = self.rvs(mc_size)
        log_dens = self.logpdf(draws)
        # Negative log-density in bits; drop non-finite evaluations
        neg_log2 = -log_dens[np.isfinite(log_dens)] / np.log(2)
        iteration += 1
        # Incremental update of the running entropy estimate
        ent += (np.mean(neg_log2) - ent) / iteration
        # Accumulate squared deviations for the standard-error estimate
        sq_dev_sum += np.sum((neg_log2 - ent) ** 2)
        total = iteration * mc_size
        sem = z_crit * np.sqrt(sq_dev_sum / (total * (total - 1)))
    return ent, sem