ITE / code / H_I_D / base_estimators / HRenyi_kNN_1tok_initialization.m

function [co] = HRenyi_kNN_1tok_initialization(mult)
%Initialization of the kNN (k-nearest neighbor, S={1,...,k}) based Rényi entropy estimator.
%   1)The estimator is treated as a cost object (co).
%   2) We make use of the naming convention 'H<name>_initialization', to ease embedding new entropy estimation methods.
%   mult: is a multiplicative constant relevant (needed) in the estimation; '=1' means yes, '=0' no.
%   co: cost object (structure).
%Copyright (C) 2012 Zoltan Szabo ("http://nipg.inf.elte.hu/szzoli", "szzoli (at) cs (dot) elte (dot) hu")
%This file is part of the ITE (Information Theoretical Estimators) Matlab/Octave toolbox.
%ITE is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by
%the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
%This software is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
%MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for more details.
%You should have received a copy of the GNU General Public License along with ITE. If not, see <http://www.gnu.org/licenses/>.

%mandatory fields:
    co.name = 'Renyi_kNN_1tok';
    co.mult = mult;
%other fields:
    %Possibilities for 'co.kNNmethod' (see 'kNN_squared_distances.m'): 
        %I: 'knnFP1': fast pairwise distance computation and C++ partial sort; parameter: co.k.                        
        %II: 'knnFP2': fast pairwise distance computation; parameter: co.k. 										
        %III: 'knnsearch' (Matlab Statistics Toolbox): parameters: co.k, co.NSmethod ('kdtree' or 'exhaustive').
        %IV: 'ANN' (approximate nearest neighbor); parameters: co.k, co.epsi.         
            co.kNNmethod = 'knnFP1';
            co.k = 3;%k-nearest neighbors
            %co.kNNmethod = 'knnFP2';
            %co.k = 3;%k-nearest neighbors
            %co.kNNmethod = 'knnsearch';
            %co.k = 3;%k-nearest neighbors
            %co.NSmethod = 'kdtree';
            %co.kNNmethod = 'ANN';
            %co.k = 3;%k-nearest neighbors
            %co.epsi = 0; %=0: exact kNN; >0: approximate kNN, the true (not squared) distances can not exceed the real distance more than a factor of (1+epsi).

    co.alpha = 0.99; %The Rényi entropy equals the Shannon differential entropy in the limit, i.e., Renyi=H_{R,alpha} -> Shannon=H, provided that alpha -> 1.

%initialize the ann wrapper in Octave, if needed: