Skip to content

Commit

Permalink
extracted demos from ch01~08
Browse files Browse the repository at this point in the history
  • Loading branch information
sth4nth committed Mar 19, 2016
1 parent cfa7812 commit 18ad852
Show file tree
Hide file tree
Showing 23 changed files with 335 additions and 232 deletions.
32 changes: 0 additions & 32 deletions chapter03/demo.m

This file was deleted.

19 changes: 0 additions & 19 deletions chapter04/demo.m

This file was deleted.

37 changes: 0 additions & 37 deletions chapter07/demo1.m

This file was deleted.

3 changes: 0 additions & 3 deletions chapter01/demo.m → demo/ch01/info_demo.m
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,6 @@
x = ceil(k*rand(1,n));
y = ceil(k*rand(1,n));

% x = randi(k,1,n); % need statistics toolbox
% y = randi(k,1,n);

%% Entropy H(x), H(y)
Hx = entropy(x);
Hy = entropy(y);
Expand Down
14 changes: 14 additions & 0 deletions demo/ch03/linRegEm_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
% Demo (ch03): empirical Bayesian linear regression fitted with EM.
clear; close all;
dim = 1;
nSample = 200;
[X, T] = linRnd(dim, nSample);
%% Empirical Bayesian linear regression via EM
[mdl, loglik] = linRegEm(X, T);
plot(loglik);                        % likelihood trace over EM iterations
[yPred, sd] = linRegPred(mdl, X, T);
figure;
plotCurveBar(X, yPred, sd);          % predictive mean with error bars
hold on;
plot(X, T, 'o');
hold off;
16 changes: 16 additions & 0 deletions demo/ch03/linRegFp_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
% Demo (ch03): empirical Bayesian linear regression, hyperparameters
% estimated with MacKay's fixed-point iteration.
clear; close all;
d = 1;
n = 200;
[x,t] = linRnd(d,n);
%% Empirical Bayesian linear regression via Mackay fix point iteration method
[model,llh] = linRegFp(x,t);
plot(llh);  % likelihood trace across fixed-point updates
[y,sigma] = linRegPred(model,x,t);
figure
plotCurveBar(x,y,sigma);  % predictive mean with error bars
hold on;
plot(x,t,'o');
hold off;

12 changes: 12 additions & 0 deletions demo/ch03/linReg_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
% Demo (ch03): plain linear regression with predictive error bars.
clear; close all;
dim = 1;
nSample = 200;
[X, T] = linRnd(dim, nSample);
%% Linear regression
mdl = linReg(X, T);
[yPred, sd] = linRegPred(mdl, X, T);
plotCurveBar(X, yPred, sd);
hold on;
plot(X, T, 'o');
hold off;
11 changes: 11 additions & 0 deletions demo/ch04/logitBin_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
% demos for ch04

%% Logistic regression for binary classification
clear; close all;
k = 2;
n = 1000;
[X,t] = kmeansRnd(2,k,n);
% the t-1 / +1 shift suggests kmeansRnd labels are 1..k and logitBin
% expects 0/1 — TODO confirm against those functions
[model, llh] = logitBin(X,t-1);
plot(llh);
y = logitBinPred(model,X)+1;
% NOTE(review): no `figure` before binPlot, so it may draw over the llh
% plot — confirm whether binPlot opens its own window
binPlot(model,X,y)
8 changes: 8 additions & 0 deletions demo/ch04/logitMn_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
%% Logistic regression for multiclass classification
% Demo (ch04): multinomial logistic regression on synthetic clusters.
clear; close all;   % close all added for consistency with the other demos
k = 3;
n = 1000;
[X,t] = kmeansRnd(2,k,n);
[model, llh] = logitMn(X,t);
y = logitMnPred(model,X);
plotClass(X,y)
File renamed without changes.
9 changes: 9 additions & 0 deletions demo/ch06/knCenter_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
%% demo for knCenter
% Checks two identities of the centered kernel (differences should be ~0):
%   1) knCenter(kn,X,X1) equals the diagonal of knCenter(kn,X,X1,X1)
%   2) knCenter(kn,X) equals knCenter(kn,X,X,X)
clear; close all;
kn = @knGauss;
X=rand(2,100);
X1=rand(2,10);
% removed unused X2=rand(2,5); — it was never referenced below

maxdiff(knCenter(kn,X,X1),diag(knCenter(kn,X,X1,X1))')
maxdiff(knCenter(kn,X),knCenter(kn,X,X,X))
141 changes: 61 additions & 80 deletions chapter06/demo.m → demo/ch06/knLin_demo.m
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,80 +1,61 @@
% demos for ch06  (leading '%' restored — a bare text line is a syntax error)


%% Kernel regression with gaussian kernel
clear; close all;
n = 100;
x = linspace(0,2*pi,n); % test data
t = sin(x)+rand(1,n)/2;
model = knReg(x,t,1e-4,@knGauss);
[y,s] = knRegPred(model,x);
plotCurveBar(x,y,s);
hold on;
plot(x,t,'o');
hold off;
%% Kernel regression with linear kernel is EQUIVALENT to linear regression
lambda = 1e-4;
model_kn = knReg(x,t,lambda,@knLin);
model_lin = linReg(x,t,lambda);

% predict on every other point and compare the two models
idx = 1:2:n;
xt = x(:,idx);
tt = t(idx);

[y_kn, sigma_kn,p_kn] = knRegPred(model_kn,xt,tt);
[y_lin, sigma_lin,p_lin] = linRegPred(model_lin,xt,tt);

% each maxdiff should report a value near 0
maxdiff(y_kn,y_lin)
maxdiff(sigma_kn,sigma_lin)
maxdiff(p_kn,p_lin)
%% Kernel kmeans with linear kernel is EQUIVALENT to kmeans
clear; close all;
d = 2;
k = 3;
n = 500;
[X,y] = kmeansRnd(d,k,n);
init = ceil(k*rand(1,n)); % shared random initial assignment for both runs
[y_kn,en_kn,model_kn] = knKmeans(X,init,@knLin);
[y_lin,en_lin,model_lin] = kmeans(X,init);

idx = 1:2:n;
Xt = X(:,idx);

[t_kn,ent_kn] = knKmeansPred(model_kn, Xt);
[t_lin,ent_lin] = kmeansPred(model_lin, Xt);

maxdiff(y_kn,y_lin)
maxdiff(en_kn,en_lin)

maxdiff(t_kn,t_lin)
maxdiff(ent_kn,ent_lin)
%% Kernel PCA with linear kernel is EQUIVALENT TO PCA
clear; close all;
d = 10;
q = 2;
n = 500;
X = randn(d,n);


model_kn = knPca(X,q,@knLin);
idx = 1:2:n;
Xt = X(:,idx);

Y_kn = knPcaPred(model_kn,Xt);

[U,L,mu,mse] = pca(X,q);
Y_lin = U'*bsxfun(@minus,Xt,mu); % projection


R = Y_lin/Y_kn; % the results are equivalent up to a rotation.
maxdiff(R*R', eye(q))

%% demo for knCenter
clear; close all;
kn = @knGauss;
X=rand(2,100);
X1=rand(2,10);
X2=rand(2,5);

maxdiff(knCenter(kn,X,X1),diag(knCenter(kn,X,X1,X1))')
maxdiff(knCenter(kn,X),knCenter(kn,X,X,X))
% Demo (ch06): with a linear kernel, kernel methods reduce to their
% standard linear counterparts (regression, kmeans, PCA).
%% Kernel regression with linear kernel is EQUIVALENT to linear regression
clear; close all;
n = 100;
x = linspace(0,2*pi,n); % test data
t = sin(x)+rand(1,n)/2;

lambda = 1e-4;
model_kn = knReg(x,t,lambda,@knLin);
model_lin = linReg(x,t,lambda);

idx = 1:2:n;  % predict on every other point
xt = x(:,idx);
tt = t(idx);

[y_kn, sigma_kn,p_kn] = knRegPred(model_kn,xt,tt);
[y_lin, sigma_lin,p_lin] = linRegPred(model_lin,xt,tt);

% each maxdiff should report a value near 0
maxdiff(y_kn,y_lin)
maxdiff(sigma_kn,sigma_lin)
maxdiff(p_kn,p_lin)
%% Kernel kmeans with linear kernel is EQUIVALENT to kmeans
clear; close all;
d = 2;
k = 3;
n = 500;
[X,~] = kmeansRnd(d,k,n);  % ground-truth labels are not used here
init = ceil(k*rand(1,n));  % shared random initial assignment for both runs
[y_kn,en_kn,model_kn] = knKmeans(X,init,@knLin);
[y_lin,en_lin,model_lin] = kmeans(X,init);

idx = 1:2:n;
Xt = X(:,idx);

[t_kn,ent_kn] = knKmeansPred(model_kn, Xt);
[t_lin,ent_lin] = kmeansPred(model_lin, Xt);

maxdiff(y_kn,y_lin)
maxdiff(en_kn,en_lin)

maxdiff(t_kn,t_lin)
maxdiff(ent_kn,ent_lin)
%% Kernel PCA with linear kernel is EQUIVALENT TO PCA
clear; close all;
d = 10;
q = 2;
n = 500;
X = randn(d,n);

model_kn = knPca(X,q,@knLin);
idx = 1:2:n;
Xt = X(:,idx);

Y_kn = knPcaPred(model_kn,Xt);

[U,~,mu] = pca(X,q);  % only U and mu are used; L and mse were unused
Y_lin = U'*bsxfun(@minus,Xt,mu); % projection

R = Y_lin/Y_kn; % the results are equivalent up to a rotation.
maxdiff(R*R', eye(q))
14 changes: 14 additions & 0 deletions demo/ch06/knReg_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
% Demo (ch06): kernel regression with a Gaussian kernel on noisy sine data.
%% Kernel regression with gaussian kernel
clear; close all;
nPoints = 100;
xGrid = linspace(0,2*pi,nPoints); % test data
tNoisy = sin(xGrid) + rand(1,nPoints)/2;
mdl = knReg(xGrid, tNoisy, 1e-4, @knGauss);
[mu, sd] = knRegPred(mdl, xGrid);
plotCurveBar(xGrid, mu, sd);  % predictive mean with error bars
hold on;
plot(xGrid, tNoisy, 'o');
hold off;
12 changes: 12 additions & 0 deletions demo/ch07/rvmBinEm_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
%% RVM for classification
% Demo (ch07): relevance vector machine classifier trained with EM.
clear; close all
nClass = 2;
dim = 2;
nSample = 1000;
[X, lbl] = kmeansRnd(dim, nClass, nSample);

% shift labels down by one for training, back up for plotting (as original)
[mdl, loglik] = rvmBinEm(X, lbl - 1);
plot(loglik);
pred = rvmBinPred(mdl, X) + 1;
figure;
binPlot(mdl, X, pred);
1 change: 0 additions & 1 deletion chapter07/demo2.m → demo/ch07/rvmBinFp_demo.m
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
d = 2;
n = 1000;
[X,t] = kmeansRnd(d,k,n);
[x1,x2] = meshgrid(linspace(min(X(1,:)),max(X(1,:)),n), linspace(min(X(2,:)),max(X(2,:)),n));

[model, llh] = rvmBinFp(X,t-1);
plot(llh);
Expand Down
18 changes: 18 additions & 0 deletions demo/ch07/rvmRegEm_demo.m
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
%% regression
% Demo (ch07): RVM regression by EM on noisy 1-D linear data.
clear; close all;   % added for consistency with the other demos
d = 100;            % number of samples
beta = 1e-1;        % noise scale
X = rand(1,d);
w = randn;
b = randn;
t = w*X+b+beta*randn(1,d);     % w is scalar, so the original w' was a no-op
x = linspace(min(X),max(X),d); % test data

%% RVM regression by EM
[model,llh] = rvmRegEm(X,t);
plot(llh);
% NOTE(review): prediction reuses linRegPred on the RVM model — presumably
% the model structs are compatible; confirm against the toolbox.
[y, sigma] = linRegPred(model,x,t);
figure
plotCurveBar(x,y,sigma);  % predictive mean with error bars
hold on;
plot(X,t,'o');
hold off
Loading

0 comments on commit 18ad852

Please sign in to comment.