def estimation(self, y):
    """ Estimate Shannon entropy via k-nearest-neighbor distances.

    Parameters
    ----------
    y : (number of samples, dimension)-ndarray
        One row of y corresponds to one sample.

    Returns
    -------
    h : float
        Estimated Shannon entropy.

    References
    ----------
    M. N. Goria, Nikolai N. Leonenko, V. V. Mergel, and P. L. Novi
    Inverardi. A new class of random vector entropy estimators and its
    applications in testing statistical hypotheses. Journal of
    Nonparametric Statistics, 17: 277-297, 2005. (S={k})
    Harshinder Singh, Neeraj Misra, Vladimir Hnizdo, Adam Fedorowicz
    and Eugene Demchuk. Nearest neighbor estimates of entropy.
    American Journal of Mathematical and Management Sciences, 23,
    301-321, 2003. (S={k})
    L. F. Kozachenko and Nikolai N. Leonenko. A statistical estimate
    for the entropy of a random vector. Problems of Information
    Transmission, 23:9-16, 1987. (S={1})

    Examples
    --------
    h = co.estimation(y)
    """
    num_of_samples, dim = y.shape
    # Distances from each sample to its nearest neighbors within y
    # itself (the True flag excludes the query point as its own
    # neighbor, per the project's knn_distances convention).
    distances_yy = knn_distances(y, y, True, self.knn_method, self.k,
                                 self.eps, 2)[0]
    # Column holding the distance to the k-th nearest neighbor of each
    # sample; this is a view, so the zero-guard below writes through to
    # distances_yy as well.
    kth_distances = distances_yy[:, self.k - 1]
    # Guard: duplicated samples yield zero distance, which would make
    # log() blow up; clamp them to a tiny positive value.
    kth_distances[kth_distances == 0] = 1e-6
    # Volume of the d-dimensional unit ball, needed by the estimator.
    unit_ball_volume = volume_of_the_unit_ball(dim)
    # kNN Shannon entropy estimator:
    #   h = log(n-1) - psi(k) + log(V_d) + (d/n) * sum_i log r_k(i)
    h = log(num_of_samples - 1) - psi(self.k) + log(unit_ball_volume) \
        + dim * sum(log(kth_distances)) / num_of_samples
    return h