function I=quad_tsallis(X,Y,xcov,ycov)
% function I=quad_tsallis(X,Y,xcov,ycov)
%
% Quadratic Tsallis mutual information between X and Y.
%
% Using the quadratic Tsallis entropy H = 1 - \int p^2, mutual
% information is defined here as
%
%   I(X;Y) = H(X) + H(Y) - H(X)H(Y) - H(X,Y)
%
% which algebraically simplifies to
%
%   I(X;Y) = \int p(x,y)^2 - \int p(x)^2 \int p(y)^2.
%
% The code below computes the three integrals (HX, HY, HXY are the
% integrals \int p^2, NOT the entropies themselves) and returns
% I = HXY - HX*HY.
%
% Inputs:
%   X    - dimx x n matrix; each column is a sample of X.
%   Y    - dimy x n matrix; each column is a sample of Y.
%          X and Y must have the SAME number of columns, because
%          column i of X and column i of Y are treated as one
%          paired sample from the joint p(x,y).
%   xcov - scalar variance of the spherical Gaussian Parzen kernel for X.
%   ycov - scalar variance of the spherical Gaussian Parzen kernel for Y.
%
% Output:
%   I    - scalar estimate of the quadratic Tsallis mutual information.
%
% Assumes a Parzen density estimate for both variables, with
% spherical, homoscedastic Gaussian kernels.
%
% NOTE(review): rbf4nn(A,B,v) is assumed to return the n x n matrix of
% unnormalized Gaussian kernel evaluations between the columns of A and
% B with variance v -- confirm against its definition.
%
% D. Wingate 10/25/2006
%

[dimx, nx] = size(X);
[dimy, ny] = size(Y);

if ( nx ~= ny )
  % error() aborts the function; no return statement is needed here.
  error( 'Number of samples must be equal!' );
end

% \int p^2 for a Parzen estimate is a double sum over pairs of kernels;
% the convolution of two Gaussians with variance v is a Gaussian with
% variance 2v, hence the doubling of the kernel variances here.
xcov = 2*xcov;
ycov = 2*ycov;

% Normalization constants of the (convolved) spherical Gaussian kernels.
norm_x = 1/sqrt((2*pi*xcov)^dimx);
norm_y = 1/sqrt((2*pi*ycov)^dimy);

% 1/n^2 factor from the double sum over n Parzen kernels.
nsq = 1/(nx*nx);

% \int p(x)^2: pairwise kernel sum over the X samples.
bX = rbf4nn( X, X, xcov );
sumX = sum( sum( bX ) );
HX = norm_x * nsq * sumX;

% \int p(y)^2: pairwise kernel sum over the Y samples.
bY = rbf4nn( Y, Y, ycov );
sumY = sum( sum( bY ) );
HY = norm_y * nsq * sumY;

% \int p(x,y)^2: with spherical kernels, the joint kernel on the
% concatenated (x,y) space factors into the product of the marginal
% kernels, so the joint pairwise matrix is the elementwise product.
bXY = bX .* bY;
sumXY = sum( sum( bXY ) );
HXY = (norm_x*norm_y) * nsq * sumXY;

I = HXY - HX*HY;

return;