ITE / code / H_I_D / base_estimators / HShannon_spacing_V_estimation.m

function [H] = HShannon_spacing_V_estimation(Y,co)
%Estimates the Shannon entropy (H) of Y (Y(:,t) is the t^th sample) using Vasicek's spacing method. Cost parameters are provided in the cost object co.
%We make use of the naming convention 'H<name>_estimation', to ease embedding new entropy estimation methods.
%   Oldrich Vasicek. A test for normality based on sample entropy. Journal of the Royal Statistical Society, Series B, 38(1):54-59, 1976.
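%
%EXAMPLE (illustrative sketch only; it assumes the cost object co is created by the toolbox's HShannon_spacing_V_initialization(mult) routine, which is not part of this file):
%   Y = randn(1,1000);                         %1000 samples from the standard normal distribution
%   co = HShannon_spacing_V_initialization(1); %mult = 1
%   H = HShannon_spacing_V_estimation(Y,co);   %should be close to log(sqrt(2*pi*exp(1))) ~ 1.4189
%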
%Copyright (C) 2012 Zoltan Szabo ("", "szzoli (at) cs (dot) elte (dot) hu")
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

[d,num_of_samples] = size(Y);

    if d~=1
        error('The samples must be one-dimensional for this estimator.');
    end

m = floor(sqrt(num_of_samples)); %spacing parameter: m/num_of_samples -> 0 and m -> infty as num_of_samples -> infty; m < num_of_samples/2
Y_sorted = sort(Y);
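%pad the order statistics: Y_(i-m) := Y_(1) for i-m < 1 and Y_(i+m) := Y_(num_of_samples) for i+m > num_of_samples: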
Y_sorted = [repmat(Y_sorted(1),1,m),Y_sorted,repmat(Y_sorted(end),1,m)];
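%m-spacings: diffs(i) = Y_(i+m) - Y_(i-m), i = 1,...,num_of_samples: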
diffs = Y_sorted(2*m+1:num_of_samples+2*m) - Y_sorted(1:num_of_samples);
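%Vasicek estimate: average of log( num_of_samples/(2*m) * [Y_(i+m) - Y_(i-m)] ) over the samples: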
H = mean(log(num_of_samples / (2*m) * diffs));