Skip to content

Commit

Permalink
updated demos for ch01~06
Browse files Browse the repository at this point in the history
  • Loading branch information
sth4nth committed Feb 20, 2016
1 parent 65aa517 commit f3b3095
Show file tree
Hide file tree
Showing 4 changed files with 75 additions and 72 deletions.
18 changes: 9 additions & 9 deletions chapter01/demo.m
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
% Done
% demo for information theory toolbox

% demos for ch01
clear;
k = 10; % variable range
n = 100; % number of variables
Expand All @@ -10,20 +10,20 @@
% x = randi(k,1,n); % need statistics toolbox
% y = randi(k,1,n);

%% entropy H(x), H(y)
%% Entropy H(x), H(y)
Hx = entropy(x);
Hy = entropy(y);
%% joint entropy H(x,y)
%% Joint entropy H(x,y)
Hxy = jointEntropy(x,y);
%% conditional entropy H(x|y)
%% Conditional entropy H(x|y)
Hx_y = condEntropy(x,y);
%% mutual information I(x,y)
%% Mutual information I(x,y)
Ixy = mutInfo(x,y);
%% relative entropy (KL divergence) KL(p(x)|p(y))
%% Relative entropy (KL divergence) KL(p(x)|p(y))
Dxy = relatEntropy(x,y);
%% normalized mutual information I_n(x,y)
%% Normalized mutual information I_n(x,y)
nIxy = nmi(x,y);
%% nomalized variation information I_v(x,y)
%% Normalized variation information I_v(x,y)
vIxy = nvi(x,y);
%% H(x|y) = H(x,y)-H(y)
isequalf(Hx_y,Hxy-Hy)
Expand Down
5 changes: 2 additions & 3 deletions chapter03/demo.m
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
% Done
% demo for chapter 03
% demos for ch03
clear; close all;
d = 1;
n = 200;
[x,t] = linRnd(d,n);
%% linear regression
%% Linear regression
model = linReg(x,t);
plotBar(model,x,t);
%% Empirical Bayesian linear regression via EM
Expand Down
7 changes: 4 additions & 3 deletions chapter04/demo.m
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
% TODO: multiPlot: plot multiclass decision boundary
%
% demos for ch04

%% Logistic regression for binary classification
clear; close all;
k = 2;
n = 1000;
Expand All @@ -8,7 +9,7 @@
plot(llh);
y = logitBinPred(model,X)+1;
binPlot(model,X,y)
%%
%% Logistic regression for multiclass classification
clear
k = 3;
n = 1000;
Expand Down
117 changes: 60 additions & 57 deletions chapter06/demo.m
Original file line number Diff line number Diff line change
@@ -1,74 +1,77 @@
% demos for ch06


%% Kernel regression with Gaussian kernel
clear; close all;
n = 100;
x = linspace(0,2*pi,n); % test data
t = sin(x)+rand(1,n)/2;

model = knReg(x,t,1e-4,@knGauss);
knRegPlot(model,x,t);

%% kernel regression with linear kernel is linear regression
% clear; close all;
% n = 100;
% x = linspace(0,2*pi,n); % test data
% t = sin(x)+rand(1,n)/2;
% lambda = 1e-4;
% model_kn = knReg(x,t,lambda,@knLin);
% model_lin = linReg(x,t,lambda);
%
% idx = 1:2:n;
% xt = x(:,idx);
% tt = t(idx);
%
% [y_kn, sigma_kn,p_kn] = knRegPred(model_kn,xt,tt);
% [y_lin, sigma_lin,p_lin] = linPred(model_lin,xt,tt);
%
% maxabsdiff(y_kn,y_lin)
% maxabsdiff(sigma_kn,sigma_lin)
% maxabsdiff(p_kn,p_lin)
%% kernel kmeans with linear kernel is kmeans
% clear; close all;
% d = 2;
% k = 3;
% n = 500;
% [X,y] = kmeansRnd(d,k,n);
% init = ceil(k*rand(1,n));
% [y_kn,en_kn,model_kn] = knKmeans(X,init,@knLin);
% [y_lin,en_lin,model_lin] = kmeans(X,init);
%
% idx = 1:2:n;
% Xt = X(:,idx);
%
% [t_kn,ent_kn] = knKmeansPred(model_kn, Xt);
% [t_lin,ent_lin] = kmeansPred(model_lin, Xt);
%
% maxabsdiff(y_kn,y_lin)
% maxabsdiff(en_kn,en_lin)
%
% maxabsdiff(t_kn,t_lin)
% maxabsdiff(ent_kn,ent_lin)
%% kernel PCA with linear kernel is PCA
%% Kernel regression with linear kernel is EQUIVALENT to linear regression
lambda = 1e-4;
model_kn = knReg(x,t,lambda,@knLin);
model_lin = linReg(x,t,lambda);

idx = 1:2:n;
xt = x(:,idx);
tt = t(idx);

[y_kn, sigma_kn,p_kn] = knRegPred(model_kn,xt,tt);
[y_lin, sigma_lin,p_lin] = linPred(model_lin,xt,tt);

maxdiff(y_kn,y_lin)
maxdiff(sigma_kn,sigma_lin)
maxdiff(p_kn,p_lin)
%% Kernel kmeans with linear kernel is EQUIVALENT to kmeans
clear; close all;
d = 2;
k = 3;
n = 500;
[X,y] = kmeansRnd(d,k,n);
init = ceil(k*rand(1,n));
[y_kn,en_kn,model_kn] = knKmeans(X,init,@knLin);
[y_lin,en_lin,model_lin] = kmeans(X,init);

idx = 1:2:n;
Xt = X(:,idx);

[t_kn,ent_kn] = knKmeansPred(model_kn, Xt);
[t_lin,ent_lin] = kmeansPred(model_lin, Xt);

maxdiff(y_kn,y_lin)
maxdiff(en_kn,en_lin)

maxdiff(t_kn,t_lin)
maxdiff(ent_kn,ent_lin)
%% Kernel PCA with linear kernel is EQUIVALENT to PCA
clear; close all;
d = 10;
p = 2;
q = 2;
n = 500;
X = randn(d,n);

model_lin = pca(X,p);
model_kn = knPca(X,p,@knLin);

model_kn = knPca(X,q,@knLin);
idx = 1:2:n;
Xt = X(:,idx);
Y_lin = pcaPred(model_lin,Xt);

Y_kn = knPcaPred(model_kn,Xt);

R = Y_lin/Y_kn % the results are equivalent up to a rotation.

%% test case for knCenter
% clear; close all;
% kn = @knGauss;
% X=rand(2,100);
% X1=rand(2,10);
% X2=rand(2,5);
%
% isequalf(knCenter(kn,X,X1),diag(knCenter(kn,X,X1,X1)))
% isequalf(knCenter(kn,X),knCenter(kn,X,X,X));
[U,L,mu,err1] = pca(X,q);
Y_lin = U'*bsxfun(@minus,Xt,mu); % projection


R = Y_lin/Y_kn; % the results are equivalent up to a rotation.
maxdiff(R*R', eye(q))

%% Demo for knCenter
clear; close all;
kn = @knGauss;
X=rand(2,100);
X1=rand(2,10);
X2=rand(2,5);

maxdiff(knCenter(kn,X,X1),diag(knCenter(kn,X,X1,X1)))
maxdiff(knCenter(kn,X),knCenter(kn,X,X,X))

0 comments on commit f3b3095

Please sign in to comment.