%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%INFORMATION THEORY ENTROPY
%%COMPUTATION
%
%Rebecca Kleinberger
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%
%
% Estimate the Shannon entropy (bits/sample) of an audio signal:
% read a signal, bin its sample values into 1000 equal-width bins,
% form an empirical probability mass function, and compute
% H = -sum(p .* log2(p)).

% NOTE: wavread was removed in MATLAB R2015b; audioread is the
% drop-in replacement for the [signal, sample-rate] outputs used here.
[sig, fssig] = audioread('sound/speech/Proustraw.wav');
sig = sig(:,1);               % keep only the first (left) channel
data = sig;
%data = [1 6 5,4 0 0,850876 6 9 3 2 7 6 9];  % small example for testing

Min = min(data);
Max = max(data);
step = (Max-Min)/1000;        % 1000 equal-width bins across the value range
xRange = Min:step:Max;
N = hist(data,xRange);        %# Bin the data (xRange gives bin centers)
p = N./numel(data);           %# Empirical probability of each bin

plot(xRange,p); %# Plot the probabilities for each integer
xlabel('Integer value');
ylabel('Probability');

% Entropy: sum over non-zero bins only (0*log2(0) is taken as 0).
nz = p(p ~= 0);
H = -sum(nz .* log2(nz));
fprintf('entropy=%g bit/sample\n',H);