# HG changeset patch
# User Zoltan Szabo
# Date 1362559000 -3600
# Node ID e448f3f1fe3da130acf8ab5122646f3d2ad49736
# Parent 646985324047a3fecdf0820baf61902f0240d226
Two one-dimensional Shannon entropy estimators based on the maximum entropy method: added; see 'HShannon_MaxEnt1_initialization.m', 'HShannon_MaxEnt1_estimation.m', 'HShannon_MaxEnt2_initialization.m', 'HShannon_MaxEnt2_estimation.m'.
diff --git a/CHANGELOG.txt b/CHANGELOG.txt
--- a/CHANGELOG.txt
+++ b/CHANGELOG.txt
@@ -1,3 +1,6 @@
+v0.33 (Mar 6, 2013):
+-Two one-dimensional Shannon entropy estimators based on the maximum entropy method: added; see 'HShannon_MaxEnt1_initialization.m', 'HShannon_MaxEnt1_estimation.m', 'HShannon_MaxEnt2_initialization.m', 'HShannon_MaxEnt2_estimation.m'.
+
v0.32 (Feb 25, 2013):
-ICA and ISA structures: introduced for unified treatment of the estimators. It will also enable embedding of general ICA optimization algorithms such as the Jacobi method.
-Some variables: renamed; see ARmethod_parameters -> AR, ARXmethod_parameters -> ARX, fARmethod_parameters -> fAR, MAparameters -> MA, gaussianizationmethod_parameters -> gaussianization.
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -37,7 +37,7 @@
**Download** the latest release:
-- code: [zip](https://bitbucket.org/szzoli/ite/downloads/ITE-0.32_code.zip), [tar.bz2](https://bitbucket.org/szzoli/ite/downloads/ITE-0.32_code.tar.bz2),
-- [documentation (pdf)](https://bitbucket.org/szzoli/ite/downloads/ITE-0.32_documentation.pdf).
+- code: [zip](https://bitbucket.org/szzoli/ite/downloads/ITE-0.33_code.zip), [tar.bz2](https://bitbucket.org/szzoli/ite/downloads/ITE-0.33_code.tar.bz2),
+- [documentation (pdf)](https://bitbucket.org/szzoli/ite/downloads/ITE-0.33_documentation.pdf).
diff --git a/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_estimation.m b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_estimation.m
new file mode 100644
--- /dev/null
+++ b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_estimation.m
@@ -0,0 +1,53 @@
+function [H] = HShannon_MaxEnt1_estimation(Y,co)
+%Estimates the Shannon entropy (H) of Y using the maximum entropy distribution method. The used Gi functions are G1(x) = x exp(-x^2/2) and G2(x) = abs(x).
+%
+%We use the naming convention 'H_estimation' to ease embedding new entropy estimation methods.
+%
+%INPUT:
+%   Y: Y(:,t) is the t^th sample.
+%  co: entropy estimator object.
+%
+%REFERENCE:
+%   Aapo Hyvarinen. New approximations of differential entropy for independent component analysis and projection pursuit. In Advances in Neural Information Processing Systems (NIPS), pages 273-279, 1997. (entropy approximation based on the maximum entropy distribution)
+%   Thomas M. Cover and Joy A. Thomas. Elements of Information Theory. John Wiley and Sons, New York, USA, 1991. (maximum entropy distribution)
+%
+%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
+%
+%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
+%
+%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
+%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+%
+%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+%
+%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.
+
+%co.mult:OK. (co.mult is not referenced in this function.)
+
+[d,num_of_samples] = size(Y);
+
+%verification (the maximum entropy approximation below is for scalar variables only):
+    if d~=1
+        error('The samples must be one-dimensional for this estimator.');
+    end
+
+%normalize Y to have zero mean and unit std:
+    Y = whiten_E0(Y);%E=0, this step does not change the Shannon entropy of the variable
+    %std(Y)=1, using the unbiased (1/(n-1)) variance estimate:
+    s = sqrt(sum(Y.^2)/(num_of_samples-1));%= std(Y,[],2)
+    Y = Y / s;
+    H_whiten = log(s);%we will take this scaling into account via the entropy transformation rule [ H(wz) = H(z)+log(|w|) ] at the end
+
+%H1,H2 -> H (H1 = Gaussian entropy, H2 = non-Gaussianity correction):
+    H1 = ( 1+log(2*pi) ) / 2; %=H[N(0,1)]
+    %H2, with the k1/k2a constants of the G1(x) = x exp(-x^2/2), G2(x) = abs(x) terms (Hyvarinen, NIPS-1997):
+    k1 = 36 / ( 8*sqrt(3) - 9 );
+    k2a = 1 / ( 2 - 6/pi );
+    H2 = k1 * mean(Y .* exp(-Y.^2/2))^2 + k2a * (mean(abs(Y)) - sqrt(2/pi))^2;%sqrt(2/pi) = E[|x|] for x~N(0,1)
+    H = H1 - H2;
+
+%take into account the 'std=1' pre-processing:
+    H = H + H_whiten;
+
+
diff --git a/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_initialization.m b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_initialization.m
new file mode 100644
--- /dev/null
+++ b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt1_initialization.m
@@ -0,0 +1,29 @@
+function [co] = HShannon_MaxEnt1_initialization(mult)
+%Initialization of the maximum entropy distribution based Shannon differential
+%entropy (H) estimator. The used Gi functions are G1(x) = x exp(-x^2/2) and G2(x) = abs(x).
+%
+%Note:
+%   1)The estimator is treated as a cost object (co).
+%   2)We use the naming convention 'H_initialization' to ease embedding new entropy estimation methods.
+%
+%INPUT:
+%   mult: is a multiplicative constant relevant (needed) in the estimation; '=1' means yes, '=0' no.
+%OUTPUT:
+%   co: cost object (structure).
+%
+%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
+%
+%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
+%
+%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
+%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+%
+%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+%
+%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.
+
+%mandatory fields:
+    co.name = 'Shannon_MaxEnt1';%identifier of the estimator
+    co.mult = mult;%store the multiplicative-constant flag
+
diff --git a/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_estimation.m b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_estimation.m
new file mode 100644
--- /dev/null
+++ b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_estimation.m
@@ -0,0 +1,53 @@
+function [H] = HShannon_MaxEnt2_estimation(Y,co)
+%Estimates the Shannon entropy (H) of Y using the maximum entropy distribution method. The used Gi functions are G1(x) = x exp(-x^2/2) and G2(x) = exp(-x^2/2).
+%
+%We use the naming convention 'H_estimation' to ease embedding new entropy estimation methods.
+%
+%INPUT:
+%   Y: Y(:,t) is the t^th sample.
+%  co: entropy estimator object.
+%
+%REFERENCE:
+%   Aapo Hyvarinen. New approximations of differential entropy for independent component analysis and projection pursuit. In Advances in Neural Information Processing Systems (NIPS), pages 273-279, 1997. (entropy approximation based on the maximum entropy distribution)
+%   Thomas M. Cover and Joy A. Thomas. Elements of Information Theory. John Wiley and Sons, New York, USA, 1991. (maximum entropy distribution)
+%
+%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
+%
+%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
+%
+%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
+%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+%
+%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+%
+%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.
+
+%co.mult:OK. (co.mult is not referenced in this function.)
+
+[d,num_of_samples] = size(Y);
+
+%verification (the maximum entropy approximation below is for scalar variables only):
+    if d~=1
+        error('The samples must be one-dimensional for this estimator.');
+    end
+
+%normalize Y to have zero mean and unit std:
+    Y = whiten_E0(Y);%E=0, this step does not change the Shannon entropy of the variable
+    %std(Y)=1, using the unbiased (1/(n-1)) variance estimate:
+    s = sqrt(sum(Y.^2)/(num_of_samples-1));%= std(Y,[],2)
+    Y = Y / s;
+    H_whiten = log(s);%we will take this scaling into account via the entropy transformation rule [ H(wz) = H(z)+log(|w|) ] at the end
+
+%H1,H2 -> H (H1 = Gaussian entropy, H2 = non-Gaussianity correction):
+    H1 = ( 1+log(2*pi) ) / 2; %=H[N(0,1)]
+    %H2, with the k1/k2b constants of the G1(x) = x exp(-x^2/2), G2(x) = exp(-x^2/2) terms (Hyvarinen, NIPS-1997):
+    k1 = 36 / ( 8*sqrt(3) - 9 );
+    k2b = 24 / ( 16*sqrt(3) - 27 );
+    H2 = k1 * mean(Y .* exp(-Y.^2/2))^2 + k2b * (mean(exp(-Y.^2/2)) - sqrt(1/2))^2;%sqrt(1/2) = E[exp(-x^2/2)] for x~N(0,1)
+    H = H1 - H2;
+
+%take into account the 'std=1' pre-processing:
+    H = H + H_whiten;
+
+
diff --git a/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_initialization.m b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_initialization.m
new file mode 100644
--- /dev/null
+++ b/code/H_I_D_A_C/base_estimators/HShannon_MaxEnt2_initialization.m
@@ -0,0 +1,29 @@
+function [co] = HShannon_MaxEnt2_initialization(mult)
+%Initialization of the maximum entropy distribution based Shannon differential
+%entropy (H) estimator. The used Gi functions are G1(x) = x exp(-x^2/2) and G2(x) = exp(-x^2/2).
+%
+%Note:
+%   1)The estimator is treated as a cost object (co).
+%   2)We use the naming convention 'H_initialization' to ease embedding new entropy estimation methods.
+%
+%INPUT:
+%   mult: is a multiplicative constant relevant (needed) in the estimation; '=1' means yes, '=0' no.
+%OUTPUT:
+%   co: cost object (structure).
+%
+%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
+%
+%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
+%
+%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
+%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+%
+%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
+%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+%
+%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.
+
+%mandatory fields:
+    co.name = 'Shannon_MaxEnt2';%identifier of the estimator
+    co.mult = mult;%store the multiplicative-constant flag
+