ITE / code / estimators / quick_tests / quick_test_DKL.m

%function [] = quick_test_DKL()
%Quick test for Kullback-Leibler divergence estimators: analytical expression vs estimated value as a function of the sample number. In the test, normal variables are considered. See also 'analytical_value_DKL.m'.
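%Usage (a sketch, assuming the ITE 'code' directory and its subfolders are on the Matlab/Octave path,
%e.g., via addpath(genpath('<ITE root>/code'))): run the script directly; it prints the progress
%('Tk=...') and plots the estimated vs the analytical divergence as a function of the sample number.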

%Copyright (C) 2013 Zoltan Szabo ("http://www.gatsby.ucl.ac.uk/~szabo/", "zoltan (dot) szabo (at) gatsby (dot) ucl (dot) ac (dot) uk")
%
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
%
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

%clear start:
    clear all; close all;

%parameters:
    distr = 'normal'; %fixed
    d = 1; %dimension of the distribution
    num_of_samples_v = [1000:1000:10*1000]; %sample numbers used for estimation
    %estimator (of Kullback-Leibler divergence):
        %base:
            cost_name = 'KL_kNN_k';      %d>=1
            %cost_name = 'KL_kNN_kiTi';  %d>=1  
            %cost_name = 'KL_PSD_SzegoT'; %d=1
        %meta:
            %cost_name = 'KL_CCE_HShannon'; %d>=1
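        %Note: exactly one cost_name should be active (uncommented) at a time; D_initialization below
        %builds the estimator structure ('co') from this name, and D_estimation uses 'co' for every sample size.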
        
%initialization:    
    num_of_samples_max = num_of_samples_v(end);
    L = length(num_of_samples_v);
    co = D_initialization(cost_name,1); %the second argument (mult=1) indicates that multiplicative constants are relevant in the estimation
    D_hat_v = zeros(L,1); %vector of divergence estimates

%distr, d -> samples (Y1,Y2), analytical formula for the Kullback-Leibler divergence (D):
    switch distr
        case 'normal'
            %expectation:
                e2 = rand(d,1);
                e1 = e2;
            %(random) linear transformation applied to the data:
                A2 = rand(d);
                A1 = rand * A2; %the (e2,A2) => (e1,A1) choice guarantees absolute continuity (Y1<<Y2), in practice too
            %covariance matrix:
                cov1 = A1 * A1.';                    
                cov2 = A2 * A2.';     
            %generate samples:
                Y1 = A1 * randn(d,num_of_samples_max) + repmat(e1,1,num_of_samples_max); %Y1 = A1*N(0,I)+e1, i.e., Y1 ~ N(e1,cov1)
                Y2 = A2 * randn(d,num_of_samples_max) + repmat(e2,1,num_of_samples_max); %Y2 = A2*N(0,I)+e2, i.e., Y2 ~ N(e2,cov2)
            %analytical value of Kullback-Leibler divergence:
                par1.e = e1; par1.cov = cov1;
                par2.e = e2; par2.cov = cov2;
                D = analytical_value_DKL(distr,distr,par1,par2);
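            %closed-form reference (a minimal sketch, not part of the original test): for non-degenerate
            %Gaussians, D(N(e1,cov1)||N(e2,cov2)) = [trace(inv(cov2)*cov1) + (e2-e1)'*inv(cov2)*(e2-e1) - d + log(det(cov2)/det(cov1))]/2;
            %analytical_value_DKL is expected to return this value for distr='normal':
                D_closed_form = ( trace(cov2\cov1) + (e2-e1).' * (cov2\(e2-e1)) - d + log(det(cov2)/det(cov1)) ) / 2;
                %D and D_closed_form should agree up to numerical precision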
        otherwise
            error('Distribution=?');            
    end
    
%estimation:
    Tk = 0; %index of the sample number examined
    for num_of_samples = num_of_samples_v
        Tk = Tk + 1;
        D_hat_v(Tk) = D_estimation(Y1(:,1:num_of_samples),Y2(:,1:num_of_samples),co);
        disp(strcat('Tk=',num2str(Tk),'/',num2str(L)));
    end
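    %Note: the estimates reuse nested prefixes (the first num_of_samples columns) of a single fixed
    %sample set, so each point of the curve extends the samples used by the previous one.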
    
%plot:
    plot(num_of_samples_v,D_hat_v,'r',num_of_samples_v,D*ones(L,1),'g');
    legend({'estimation','analytical value'});
    xlabel('Number of samples');
    ylabel('Kullback-Leibler divergence');