def _calc_entropy(self, thetas_ss, n_acc, k):
"""Calculate the entropy as described in Nunes & Balding, 2010.
E = log( pi^(q/2) / gamma(q/2+1) ) - digamma(k) + log(n)
+ q/n * sum_{i=1}^n( log(R_{i, k}) ), where
R_{i, k} is the Euclidean distance from the parameter theta_i to
its kth nearest neighbour;
q is the dimensionality of the parameter; and
n is the number of the accepted parameters n_acc in the rejection sampling.
Parameters
----------
thetas_ss : array_like
Parameters accepted upon the rejection sampling using
the summary-statistics combination ss.
n_acc : int
Number of the accepted parameters.
k : int
Nearest neighbour to be searched.
Returns
-------
float
Entropy.
"""
    q = thetas_ss.shape[1]

    # Calculate the distance to the kth nearest neighbour across all accepted parameters.
    searcher_knn = cKDTree(thetas_ss)
    sum_log_dist_knn = 0
    for theta_ss in thetas_ss:
        dist_knn = searcher_knn.query(theta_ss, k=k)[0][-1]
        sum_log_dist_knn += np.log(dist_knn)

    # Calculate the entropy.
    E = np.log(np.pi**(q / 2) / gamma((q / 2) + 1)) - digamma(k) \
        + np.log(n_acc) + (q / n_acc) * sum_log_dist_knn

    return E
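

# A minimal standalone sketch (an assumption, not part of the original class):
# it lists the imports the method relies on and reproduces the same
# k-nearest-neighbour entropy estimate without the Python loop. The synthetic
# `thetas_demo` array and the values of n_acc, q and k are illustrative only.
import numpy as np
from scipy.spatial import cKDTree
from scipy.special import gamma, digamma

rng = np.random.default_rng(0)
n_acc_demo, q_demo, k_demo = 100, 2, 4
thetas_demo = rng.normal(size=(n_acc_demo, q_demo))

# Query all points at once; column -1 holds the distance to the kth neighbour.
dist_knn_demo = cKDTree(thetas_demo).query(thetas_demo, k=k_demo)[0][:, -1]
E_demo = (np.log(np.pi**(q_demo / 2) / gamma((q_demo / 2) + 1)) - digamma(k_demo)
          + np.log(n_acc_demo) + (q_demo / n_acc_demo) * np.sum(np.log(dist_knn_demo)))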