diff --git a/TODO.txt b/TODO.txt
index 35ce761..b7b6565 100644
--- a/TODO.txt
+++ b/TODO.txt
@@ -1,5 +1,4 @@
 TODO:
-chapter10: compute bound terms (entropy) inside each factors
 chapter10/12: prediction functions for VB
 chapter05: MLP
 chapter08: BP, EP
diff --git a/chapter01/condEntropy.m b/chapter01/condEntropy.m
index df8ec94..bc43d52 100644
--- a/chapter01/condEntropy.m
+++ b/chapter01/condEntropy.m
@@ -1,6 +1,9 @@
 function z = condEntropy (x, y)
-% Compute conditional entropy H(x|y) of two discrete variables x and y.
-% x, y: two vectors of integers of the same length
+% Compute conditional entropy z=H(x|y) of two discrete variables x and y.
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: conditional entropy z=H(x|y)
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
diff --git a/chapter01/entropy.m b/chapter01/entropy.m
index 4ba1596..dbdbc84 100644
--- a/chapter01/entropy.m
+++ b/chapter01/entropy.m
@@ -1,6 +1,9 @@
 function z = entropy(x)
-% Compute entropy H(x) of a discrete variable x.
-% x: a vectors of integers
+% Compute entropy z=H(x) of a discrete variable x.
+% Input:
+%   x: a vector of integers
+% Output:
+%   z: entropy z=H(x)
 % Written by Mo Chen (sth4nth@gmail.com).
 n = numel(x);
 x = reshape(x,1,n);
diff --git a/chapter01/jointEntropy.m b/chapter01/jointEntropy.m
index 33d58f2..accaa7b 100644
--- a/chapter01/jointEntropy.m
+++ b/chapter01/jointEntropy.m
@@ -1,6 +1,9 @@
 function z = jointEntropy(x, y)
-% Compute joint entropy H(x,y) of two discrete variables x and y.
-% x, y: two vectors of integers of the same length
+% Compute joint entropy z=H(x,y) of two discrete variables x and y.
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: joint entropy z=H(x,y)
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
diff --git a/chapter01/mutInfo.m b/chapter01/mutInfo.m
index d3d306d..4d7023b 100644
--- a/chapter01/mutInfo.m
+++ b/chapter01/mutInfo.m
@@ -1,6 +1,9 @@
 function z = mutInfo(x, y)
 % Compute mutual information I(x,y) of two discrete variables x and y.
-% x, y: two vectors of integers of the same length
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: mutual information z=I(x,y)
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
diff --git a/chapter01/nmi.m b/chapter01/nmi.m
index 45f6538..816fd05 100644
--- a/chapter01/nmi.m
+++ b/chapter01/nmi.m
@@ -1,6 +1,9 @@
 function z = nmi(x, y)
-% Compute normalized mutual information I(x,y)/sqrt(H(x)*H(y)).
-% x, y: two vectors of integers of the same length
+% Compute normalized mutual information z=I(x,y)/sqrt(H(x)*H(y)) of two discrete variables x and y.
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: normalized mutual information z=I(x,y)/sqrt(H(x)*H(y))
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
diff --git a/chapter01/nvi.m b/chapter01/nvi.m
index 5bfc89b..d8b614b 100644
--- a/chapter01/nvi.m
+++ b/chapter01/nvi.m
@@ -1,6 +1,9 @@
 function z = nvi(x, y)
-% Compute normalized variation information (1-I(x,y)/H(x,y)).
-% x, y: two vectors of integers of the same length
+% Compute normalized variation information z=(1-I(x,y)/H(x,y)) of two discrete variables x and y.
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: normalized variation information z=(1-I(x,y)/H(x,y))
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
diff --git a/chapter01/relatEntropy.m b/chapter01/relatEntropy.m
index 64e8d60..60b84e6 100644
--- a/chapter01/relatEntropy.m
+++ b/chapter01/relatEntropy.m
@@ -1,6 +1,9 @@
 function z = relatEntropy (x, y)
-% Compute relative entropy (a.k.a KL divergence) KL(p(x)||p(y)) of two discrete variables x and y.
-% x, y: two vectors of integers of the same length
+% Compute relative entropy (a.k.a. KL divergence) z=KL(p(x)||p(y)) of two discrete variables x and y.
+% Input:
+%   x, y: two vectors of integers of the same length
+% Output:
+%   z: relative entropy (a.k.a. KL divergence) z=KL(p(x)||p(y))
 % Written by Mo Chen (sth4nth@gmail.com).
 assert(numel(x) == numel(y));
 n = numel(x);
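As a quick smoke test of the interfaces documented above, the following MATLAB sketch calls each chapter01 function on two random integer vectors. It is illustrative only, not part of the patch: the sample size, number of states, and variable names are arbitrary assumptions, and the commented identities are standard information-theoretic checks the outputs should satisfy up to floating-point error.

% Minimal usage sketch for the chapter01 information-theory functions;
% the data below is made up purely for illustration.
n = 1000;                     % sample size (arbitrary)
x = randi(5, 1, n);           % discrete variable with 5 states
y = randi(5, 1, n);           % discrete variable with 5 states
Hx  = entropy(x);             % z = H(x)
Hxy = jointEntropy(x, y);     % z = H(x,y)
Hc  = condEntropy(x, y);      % z = H(x|y)
I   = mutInfo(x, y);          % z = I(x,y)
% Sanity checks (up to floating-point error):
%   H(x|y) = H(x,y) - H(y)
%   I(x,y) = H(x) + H(y) - H(x,y)
z1 = nmi(x, y);               % I(x,y)/sqrt(H(x)*H(y)), lies in [0,1]
z2 = nvi(x, y);               % 1 - I(x,y)/H(x,y), lies in [0,1]
kl = relatEntropy(x, y);      % KL(p(x)||p(y)) of the empirical distributions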