Author: Zoltán Szabó
Toolbox: ITE (Information Theoretical Estimators)


ITE / code / H_I_D / meta_estimators / IShannon_HShannon_estimation.m

function [I] = IShannon_HShannon_estimation(Y,ds,co)
%Estimates Shannon mutual information (I) making use of an estimator for Shannon differential entropy; co is the cost object.
%This is a "meta" method, using the relation: I(y^1,...,y^M) = \sum_{m=1}^M H(y^m) - H([y^1,...,y^M]).
%We make use of the naming convention 'I<name>_estimation', to ease embedding new mutual information estimation methods.
%   Y: Y(:,t) is the t^th sample.
%  ds: subspace dimensions.
%  co: mutual information estimator object.
%	Thomas M. Cover, Joy A. Thomas. Elements of Information Theory, John Wiley and Sons, New York, USA (1991).
%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

    num_of_comps = length(ds);%number of subspaces=:M
    cum_ds = cumsum([1;ds(1:end-1)]);%1,d_1+1,d_1+d_2+1,...,d_1+...+d_{M-1}+1 = starting indices of the subspaces

%I = - H([y^1,...,y^M]):
    I = -H_estimation(Y,co.member_co);
%I = I + \sum_{m=1}^MH(y^m):    
    for k = 1 : num_of_comps
        idx = [cum_ds(k) : cum_ds(k)+ds(k)-1];
        I = I + H_estimation(Y(idx,:),co.member_co);