%ITE / code / estimators / quick_tests / quick_test_DRenyi.m

%function [] = quick_test_DRenyi()
%Quick test for Renyi divergence estimators: analytical expression vs estimated value as a function of the sample number. In the test, normal variables are considered.

%Copyright (C) 2013 Zoltan Szabo ("http://www.gatsby.ucl.ac.uk/~szabo/", "zoltan (dot) szabo (at) gatsby (dot) ucl (dot) ac (dot) uk")
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

%clear start:
    clear all; close all;

%parameters:
    distr = 'normal'; %fixed
    alpha_D = 0.99; %parameter of the Renyi divergence; must be \ne 1
    d = 1; %dimension of the distribution
    num_of_samples_v = [1000:1000:10*1000]; %sample numbers used for estimation
    %estimator (of Renyi divergence), base:
        cost_name = 'Renyi_kNN_k'; %d>=1

%initialization:
    num_of_samples_max = num_of_samples_v(end); %largest sample number; smaller sample sets are prefixes of the full sample
    L = length(num_of_samples_v);
    co = D_initialization(cost_name,1,{'alpha',alpha_D}); %{'alpha',alpha_D}: set the 'alpha' field of the estimator
    D_hat_v = zeros(L,1); %vector of estimated Renyi divergences

%distr, d -> samples (Y1,Y2), analytical formula for the Renyi divergence (D_alpha):
    switch distr
        case 'normal'
            %expectations:
                e2 = rand(d,1);
                e1 = e2;
            %(random) linear transformation applied to the data:
                A2 = rand(d);
                A1 = rand * A2; %(e2,A2) => (e1,A1) choice guarantees Y1<<Y2 (in practice, too)
            %covariance matrices:
                cov1 = A1 * A1.';
                cov2 = A2 * A2.';
            %generate samples:
                Y1 = A1 * randn(d,num_of_samples_max) + repmat(e1,1,num_of_samples_max); %A1xN(0,I)+e1
                Y2 = A2 * randn(d,num_of_samples_max) + repmat(e2,1,num_of_samples_max); %A2xN(0,I)+e2
            %analytical value of the Renyi divergence:
                mixcov = alpha_D * cov2 + (1-alpha_D) * cov1; %Sigma_alpha = alpha*Sigma2 + (1-alpha)*Sigma1
                diffe = e1 - e2; %difference of the expectations
                %Renyi divergence (Ref.: Manuel Gil. On Renyi Divergence Measures for Continuous Alphabet Sources. PhD Thesis, Queen's University, 2011.):
                    D = alpha_D * ( 1/2 *diffe.' * inv(mixcov) * diffe  - 1 / (2*alpha_D*(alpha_D-1)) * log( abs(det(mixcov)) / (det(cov1)^(1-alpha_D) * det(cov2)^(alpha_D)) )); 
                %Kullback-Leibler divergence (verification; in the alpha_D->1 limit: KL div. = Renyi div.):
                    %D = 1/2 * ( log(det(cov2)/det(cov1)) + trace(inv(cov2)*cov1)  + diffe.' * inv(cov2) * diffe - d),
        otherwise
            error('Distribution=?'); %guard: only 'normal' is supported in this quick test
    end

%estimation (on growing prefixes of the generated sample):
    Tk = 0;%index of the sample number examined
    for num_of_samples = num_of_samples_v
        Tk = Tk + 1;
        D_hat_v(Tk) = D_estimation(Y1(:,1:num_of_samples),Y2(:,1:num_of_samples),co);
        disp(strcat('Tk=',num2str(Tk),'/',num2str(L))); %progress indicator
    end

%plot (estimated value in red vs. analytical value in green, as a function of the sample number):
    plot(num_of_samples_v,D_hat_v,'r',num_of_samples_v,D*ones(L,1),'g');
    legend({'estimation','analytical value'});
    xlabel('Number of samples');
    ylabel('Renyi divergence');