-
Notifications
You must be signed in to change notification settings - Fork 1
/
computeEntropyGaussian.m
66 lines (55 loc) · 1.87 KB
/
computeEntropyGaussian.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
function [H, HconfInt] = computeEntropyGaussian(X, errorFormula, alpha)
% Compute the differential entropy (in bits) of a Gaussian fitted to the
% data, with optional confidence intervals based on formulas from
% Djauhari (2009) "Asymptotic Distributions of Sample Covariance
% Determinant".
% ----------------------------------------
% Inputs:
%   X            - signal / neural data, nTrials x nChannels matrix
%   errorFormula - integer selecting the confidence interval formula:
%                    0 - no confidence intervals (default)
%                    1 - formula from Corollary to Theorem 1 in the article
%                    2 - formula from Theorem 2 in the article
%   alpha        - alpha level for the confidence interval (default 0.05)
% Outputs:
%   H        - entropy estimate, 0.5*(nCh*log2(2*pi*e) + log2(det(cov(X))))
%   HconfInt - [Hmin, Hmax] confidence interval; [nan, nan] when
%              errorFormula == 0
if nargin < 3
    alpha = 0.05;
end
if nargin < 2
    errorFormula = 0;
end
% set parameters
nTr = size(X, 1);   % number of trials (observations)
nCh = size(X, 2);   % number of channels (dimensions)
% determinant of the sample covariance matrix
detX = det(cov(X));
% entropy of the fitted Gaussian (in bits)
nlog2pie = nCh*log2(2*pi*exp(1));
H = 0.5*(nlog2pie+log2(detX));
% compute confidence intervals
switch errorFormula
    % no confidence intervals
    case 0
        HconfInt = [nan, nan];
    % formula from Corollary to Theorem 1 (simpler): the sample
    % determinant is asymptotically normal with variance 2*nCh/(nTr-1)*det^2
    case 1
        sigma = sqrt(2*nCh/(nTr-1)*detX^2);
        detMin = norminv(alpha/2, detX, sigma);
        detMax = norminv(1-alpha/2, detX, sigma);
        % NOTE(review): for small nTr, detMin can go negative and
        % log2(detMin) becomes complex -- confirm nTr is large enough
        Hmin = 0.5*(nlog2pie+log2(detMin));
        Hmax = 0.5*(nlog2pie+log2(detMax));
        HconfInt = [Hmin, Hmax];
    % formula from Theorem 2 (more precise): bias factor b1 and
    % variance factor b2 computed per the article
    case 2
        b1 = 1;
        b2 = 1;
        for i = 1:nCh
            b1 = b1*(nTr-i)/(nTr-1);
            b2 = b2*(nTr-i+2)/(nTr-1);
        end
        b2 = b1*(b2-b1);
        sigma = sqrt(detX^2*b2/b1^2);
        detMin = norminv(alpha/2, detX/b1, sigma);
        detMax = norminv(1-alpha/2, detX/b1, sigma);
        Hmin = 0.5*(nlog2pie+log2(detMin));
        Hmax = 0.5*(nlog2pie+log2(detMax));
        HconfInt = [Hmin, Hmax];
    otherwise
        % previously an unrecognized value left HconfInt unassigned,
        % which errored at return time; fail early with a clear message
        error('computeEntropyGaussian:badErrorFormula', ...
            'errorFormula must be 0, 1, or 2');
end
end