function I=quad_renyi(X,Y,xcov,ycov)
% function I=quad_renyi(X,Y,xcov,ycov)
%
% Quadratic Renyi mutual information between X and Y, defined as the
% sum of the individual quadratic entropies minus the joint entropy:
%
%   I = H2(X) + H2(Y) - H2(X,Y),   H2(.) = -log( information potential )
%
% Assumes a Gaussian Parzen density estimate for both variables, with a
% homoscedastic (scalar, isotropic) covariance.
%
% Inputs:
%   X    - dimx x n matrix of samples, one sample per column
%   Y    - dimy x n matrix of samples, one sample per column
%   xcov - scalar Parzen kernel variance for X
%   ycov - scalar Parzen kernel variance for Y
%
% X and Y must have the same number of samples, because column i of X
% and column i of Y are treated as one joint sample from p(x,y).
%
% D. Wingate 10/25/2006
%

[dimx, nx] = size(X);
[dimy, ny] = size(Y);

if ( nx ~= ny )
  error( 'Number of samples must be equal!' );
end

% The expectation of one Gaussian kernel under another reduces to a single
% Gaussian with doubled variance, so the pairwise information-potential
% sums below are evaluated with 2*cov.
xcov = 2*xcov;
ycov = 2*ycov;

% Normalizing constants of the (doubled-variance) isotropic Gaussian
% kernels in dimx / dimy dimensions.
norm_x = 1/sqrt((2*pi*xcov)^dimx);
norm_y = 1/sqrt((2*pi*ycov)^dimy);

% 1/n^2 averaging factor for the double sum over all sample pairs.
nsq = 1/(nx*nx);

% Information potential of X: (1/n^2) * sum_ij G(x_i - x_j; 2*xcov).
% NOTE(review): assumes rbf4nn(A,B,c) returns the unnormalized Gaussian
% kernel matrix between the columns of A and B -- confirm against rbf4nn.
bX = rbf4nn( X, X, xcov );
sumX = sum( sum( bX ) );
HX = norm_x * nsq * sumX;

% Information potential of Y.
bY = rbf4nn( Y, Y, ycov );
sumY = sum( sum( bY ) );
HY = norm_y * nsq * sumY;

% Joint information potential: the kernel on the stacked (x,y) space
% factors into the elementwise product of the marginal kernel matrices.
bXY = bX .* bY;
sumXY = sum( sum( bXY ) );
HXY = (norm_x*norm_y) * nsq * sumXY;

% I = H2(X) + H2(Y) - H2(X,Y), with H2 = -log(potential).
I = -log(HX) -log(HY) + log(HXY);