def estimation(self, y):
    """ Estimate Shannon entropy.

    Parameters
    ----------
    y : (number of samples, dimension)-ndarray
        One row of y corresponds to one sample.

    Returns
    -------
    h : float
        Estimated Shannon entropy.

    References
    ----------
    Quing Wang, Sanjeev R. Kulkarni, and Sergio Verdu. Universal
    estimation of information measures for analog sources. Foundations
    And Trends In Communications And Information Theory, 5:265-353,
    2009.

    Examples
    --------
    h = co.estimation(y,ds)
    """
    num_of_samples, dim = y.shape
    # Fit a Gaussian N(mu, sigma) to the sample (one observation per row):
    mu = mean(y, axis=0)
    sigma = cov(y, rowvar=False)
    # In 1D, cov() yields a 0-d array: det() would raise ('expected square
    # matrix') and multivariate_normal() needs a 2-d square covariance, so
    # keep the scalar determinant and promote sigma to a (1, 1) matrix.
    if dim == 1:
        det_sigma = sigma
        sigma = array([[sigma]])
    else:
        det_sigma = det(sigma)
    # Closed-form differential entropy of N(mu, sigma):
    gauss_entropy = log((2 * pi * exp(1)) ** dim * det_sigma) / 2
    # Correct by the estimated KL divergence to fresh Gaussian samples:
    gauss_sample = multivariate_normal(mu, sigma, num_of_samples)
    return gauss_entropy - self.kl_co.estimation(y, gauss_sample)