ITE / code / H_I_D / meta_estimators / HShannon_DKL_U_estimation.m
function [H] = HShannon_DKL_U_estimation(Y,co)
%Estimates the Shannon entropy (H) of Y (Y(:,t) is the t^th sample) using the relation H(Y) = -D(Y',U) + log(\prod_i(b_i-a_i)), where Y\in[a,b] = \times_{i=1}^d[a_i,b_i], D is the Kullback-Leibler divergence, Y' is the version of Y transformed linearly to [0,1]^d, and U is the uniform distribution on [0,1]^d.
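%
%Derivation sketch: under the coordinate-wise affine map Y' = (Y-a)./(b-a), the entropy changes by the log-determinant of the transformation, so H(Y) = H(Y') + log(\prod_i(b_i-a_i)); and since U has density 1 on [0,1]^d, H(Y') = -\int p'(u) log(p'(u)) du = -D(Y',U). Combining the two identities gives the relation above.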
%This is a "meta" method, i.e., the Kullback-Leibler divergence estimator can be arbitrary.
%
%We make use of the naming convention 'H<name>_estimation', to ease embedding new entropy estimation methods.
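%
%Example (a minimal usage sketch; the initializer name below is assumed from the toolbox's '<name>_initialization' convention and is not defined in this file):
%   Y = rand(3,1000);                      %d=3 dimensional, T=1000 samples
%   co = HShannon_DKL_U_initialization(1); %initializes the member KL divergence estimator (mult=1)
%   H = HShannon_DKL_U_estimation(Y,co);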
%
%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
%
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
%
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

%co.mult:OK (the estimator handles the multiplicative-constant field co.mult of the cost object correctly).

[d,num_of_samples] = size(Y); %dimension, number of samples

%estimate the support of Y (a,b):
    a = min(Y,[],2);
    b = max(Y,[],2);
    
%transform Y to [0,1]^d:
    Y2 = bsxfun(@plus,bsxfun(@rdivide,Y,b-a),a./(a-b));
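    %note: Y./(b-a) + a./(a-b) = (Y-a)./(b-a) coordinate-wise, so every coordinate of Y2 lies in [0,1]; this assumes b>a in each coordinate (non-degenerate support), otherwise the division is ill-defined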
    
%generate samples from U[0,1]^d:
    U = rand(d,num_of_samples);    
    
H = -D_estimation(Y2,U,co.member_co) + log(prod(b-a));
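
%A quick sanity check (sketch, given as comments since executable script code cannot follow a function body; the initializer name is assumed from the toolbox's naming convention): for Y uniform on [a,b], the true entropy is log(\prod_i(b_i-a_i)).
%   d = 2; T = 5000; a = [0;1]; b = [2;4];
%   Y = bsxfun(@plus,bsxfun(@times,rand(d,T),b-a),a); %Y ~ U([0,2]x[1,4])
%   co = HShannon_DKL_U_initialization(1);
%   H_hat = HShannon_DKL_U_estimation(Y,co);          %should be close to log(6)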