%function [] = quick_test_KEJR2()
%Quick test for exponentiated Jensen-Renyi kernel-2 estimators: analytical expression vs estimated value as a function of the sample number. In the test, normal variables are considered.

%Copyright (C) 2013 Zoltan Szabo ("http://www.gatsby.ucl.ac.uk/~szabo/", "zoltan (dot) szabo (at) gatsby (dot) ucl (dot) ac (dot) uk")
%
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
%
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

%clear start:
    clear all; close all;
    
%parameters:    
    distr = 'normal'; %fixed
    d = 1; %dimension of the distribution
    num_of_samples_v = [100:500:12*1000]; %sample numbers used for estimation
    u = 1; %>0, parameter of the Jensen-Renyi kernel
    %method, meta:    
    	cost_name = 'EJR2_DJR';
        
%initialization:
    L = length(num_of_samples_v);
    co = K_initialization(cost_name,1,{'alpha',2,'u',u}); %{'alpha',2,'u',u}: set the 'alpha' and 'u' fields. Note: for alpha = 2 there exists an explicit formula for the Jensen-Renyi divergence, and hence for the Jensen-Renyi kernel too.
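    %Note: the estimated quantity is the exponentiated Jensen-Renyi kernel-2,
    %K(P1,P2) = exp(-u * D_JR(P1,P2)), where D_JR denotes the Jensen-Renyi divergence
    %with alpha = 2 and weight vector w; see the analytical value computed below.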
    num_of_samples_max = num_of_samples_v(end);
    K_hat_v = zeros(L,1); %vector of the estimated Jensen-Renyi kernel
    w = [1/2;1/2]; %weight vector of the Jensen-Renyi divergence, fixed (uniform)
    
%distr, d -> samples (Y1,Y2), analytical formula for the Jensen-Renyi kernel (K):
    switch distr    
        case 'normal'
            %generate samples (Y1,Y2):
                %Y1~N(m1,s1^2xI), Y2~N(m2,s2^2xI):
                   m1 = randn(d,1);  s1 = rand;
                   m2 = randn(d,1);  s2 = rand;
                Y1 = randn(d,num_of_samples_max) * s1 + repmat(m1,1,num_of_samples_max);
                Y2 = randn(d,num_of_samples_max) * s2 + repmat(m2,1,num_of_samples_max);
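                %optional sanity check: for large sample numbers the empirical moments of
                %Y1 and Y2 should be close to (m1,s1) and (m2,s2), respectively:
                %    disp(norm(mean(Y1,2) - m1)); disp(mean(abs(std(Y1,0,2) - s1)));
                %    disp(norm(mean(Y2,2) - m2)); disp(mean(abs(std(Y2,0,2) - s2)));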

  	   %analytical value of Jensen-Renyi divergence (ref.: Fei Wang, Tanveer Syeda-Mahmood, Baba C. Vemuri, David Beymer, and Anand Rangarajan. Closed-Form Jensen-Renyi Divergence for Mixture of Gaussians and Applications to Group-Wise Shape Registration. Medical Image Computing and Computer-Assisted Intervention, 12: 648–655, 2009.)
	        ms = [m1,m2];
	        ss = [s1,s2];
	        term1 = compute_H2(w,ms,ss);
	        term2 = w(1) * compute_H2(1,m1,s1) + w(2) * compute_H2(1,m2,s2);
	        D  = term1 - term2; %D_JR = H2(\sum_i w_i f_i) - \sum_i w_i H2(f_i), where H2 is the quadratic Renyi entropy and f_i is the density of Yi
        %analytical value of Jensen-Renyi kernel-2:
            K = exp(-u*D);
        otherwise
            error('Distribution=?');                 
    end
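    %For reference, a minimal sketch of the closed-form quadratic Renyi entropy of an
    %isotropic Gaussian mixture, i.e., the quantity compute_H2 is assumed to return
    %(ref.: Wang et al., 2009, cited above); 'H2_mixture' is a hypothetical name, not
    %an ITE function, and the sketch is kept as a comment:
    %
    %    function [H2] = H2_mixture(w,ms,ss) %w: weights; ms: means (d x k); ss: stds (1 x k)
    %        d = size(ms,1);
    %        I = 0; %int f^2, where f = \sum_i w_i N(m_i,s_i^2 x I)
    %        for i = 1 : length(w)
    %        for j = 1 : length(w)
    %            s2 = ss(i)^2 + ss(j)^2; %variance in int N_i(x) N_j(x) dx = N(m_i-m_j;0,s2 x I)
    %            I = I + w(i) * w(j) * exp(-norm(ms(:,i)-ms(:,j))^2 / (2*s2)) / (2*pi*s2)^(d/2);
    %        end
    %        end
    %        H2 = -log(I); %quadratic Renyi entropy: H2(f) = -log(int f^2)
    %    end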
    
%estimation:
    Tk = 0;%index of the sample number examined
    for num_of_samples = num_of_samples_v
        Tk = Tk + 1;
        K_hat_v(Tk) = K_estimation(Y1(:,1:num_of_samples),Y2(:,1:num_of_samples),co);
        disp(strcat('Tk=',num2str(Tk),'/',num2str(L)));
    end
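    %optional: relative error of the estimates (well-defined, since K = exp(-u*D) > 0):
    %    rel_err_v = abs(K_hat_v - K) / K;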
    
%plot:
    plot(num_of_samples_v,K_hat_v,'r',num_of_samples_v,ones(L,1)*K,'g');
    legend({'estimation','analytical value'});
    xlabel('Number of samples');
    ylabel('Jensen-Renyi kernel-2');