diff --git a/bishop-gibbs-gauss.pdf b/bishop-gibbs-gauss.pdf new file mode 100644 index 000000000..a2b62c017 Binary files /dev/null and b/bishop-gibbs-gauss.pdf differ diff --git a/config-local.txt b/config-local.txt index 0dba17c27..09d8d7c49 100644 --- a/config-local.txt +++ b/config-local.txt @@ -1,17 +1,4 @@ -% This file stores hard coded paths and configuration variables -% Use getConfigValue(varname) to access a value, e.g. -% getConfigValue('PMTKsupportLink') -% -% If a config-local.txt file is found, its values will override these. -% -% PMTKmetaDirs meta, docs, tmp -% PMTKred #990000 -% PMTKauthors Kevin Murphy, Matt Dunham -% PMTKcodeDirs toolbox, demos, localUtil, matlabTools -% PMTKgvizPath C:\Program Files\Graphviz2.26\bin -% PMTKlocalWikiPath C:\path\to\pmtk3Wiki % PMTKlocalDataPath /Users/kpmurphy/GoogleCode/pmtkdata % PMTKlocalSupportPath /Users/kpmurphy/GoogleCode/pmtksupport -% PMTKpmlBookSource /Users/kpmurphy/Dropbox/MLbook/Text -% PMTKpmlFigures /Users/kpmurphy/Dropbox/MLbook/Figures -% PMTKlightSpeedDir lightspeed2.3 +% PMTKgvizPath C:\Program Files\Graphviz2.26\bin + diff --git a/config.txt b/config.txt index 72e2e543b..1057c8a56 100644 --- a/config.txt +++ b/config.txt @@ -4,10 +4,11 @@ % If a config-local.txt file is found, its values will override these. % % -% Used by initPmtk3.m (UPDATE THESE DIRECTORIES TO MATCH YOUR INSTALLATION) +% Used by initPmtk3.m % PMTKgvizPath C:\Program Files\Graphviz2.26\bin % PMTKlocalDataPath path/to/data % PMTKlocalSupportPath path/to/support +% PMTKlightSpeedDir lightspeed2.3 % % Used by generateAuthorReport , pmtkTagReport, etc % PMTKred #990000 @@ -16,7 +17,8 @@ % PMTKmetaDirs meta, docs, tmp % % Used by pmlFigureCodeReport -% PMTKpmlBookSource C:\path\to\PML\Text +% PMTKpmlBookSource /Users/kpmurphy/Dropbox/MLbook/Text +% PMTKpmlFigures /Users/kpmurphy/Dropbox/MLbook/Figures % % used by publishDemos.m, generateSynopses.m % PMTKlocalWikiPath C:\path\to\pmtk3Wiki diff --git a/demos/binaryFaDemoNewsgroups.m b/demos/binaryFaDemoNewsgroups.m index 291a22da4..ecc1dc7c0 100644 --- a/demos/binaryFaDemoNewsgroups.m +++ b/demos/binaryFaDemoNewsgroups.m @@ -1,6 +1,8 @@ % Demo of factor analysis applied to binary newsgroups bag of words % We compute 2d embedding +%PMTKreallySlow + % This file is from pmtk3.googlecode.com requireStatsToolbox; % cmdscale @@ -45,12 +47,13 @@ m = m + 1; methods(m).modelname = 'JJ'; methods(m).fitFn = @(data) binaryFAfit(data, K, 'maxIter', 6, ... - 'verbose', truesize, 'computeLoglik', false); + 'verbose', true, 'computeLoglik', false); methods(m).infFn = @(model, labels) binaryFAinferLatent(model, labels); methods(m).nlatent = K; end -Ks = []; + +%Ks = []; for kk=1:numel(Ks) K = Ks(kk); m = m + 1; @@ -61,6 +64,7 @@ methods(m).nlatent = K; end + Nmethods = numel(methods); for m=1:Nmethods fitFn = methods(m).fitFn; @@ -87,7 +91,7 @@ end ndx = 1:1:D; for d=ndx(:)' - text(mdsCoords(d,1), mdsCoords(d,2), wordlist{d}, 'fontsize', 10); + %text(mdsCoords(d,1), mdsCoords(d,2), wordlist{d}, 'fontsize', 10); end nlatent = methods(m).nlatent; title(sprintf('L=%d, N=%d, method = %s', nlatent, N, methodname)) diff --git a/demos/catFAdemo.m b/demos/catFAdemo.m new file mode 100644 index 000000000..320787c7f --- /dev/null +++ b/demos/catFAdemo.m @@ -0,0 +1,117 @@ +function catFAdemo() +% Factor analysis with categorical and continuous data +% We reproduce the demo from +% http://www.cs.ubc.ca/~emtiyaz/software/mixedDataFA.html +% This just check the code runs, it is not intrinsically interesting. 
+ +clear all +setSeed(16) +% generate data - data is stored along columns not rows! +[trainData,testData,simParams] = makeSimDataMixedDataFA(100); +nClass = simParams.nClass; +% introduce missing variables in train data +missProb = 0.1; +trainData.continuousTruth = trainData.continuous; +trainData.discreteTruth = trainData.discrete; +[D,N] = size(trainData.continuous); +miss = rand(D,N)0 + testData.continuous = yc(:,idx(nTrain+1:end)); + trainData.continuous = yc(:,idx(1:nTrain)); + else + testData.continuous = []; + trainData.continuous = []; + end + if Dd>0 + testData.discrete = yd(:,idx(nTrain+1:end)); + trainData.discrete = yd(:,idx(1:nTrain)); + else + testData.discrete = []; + trainData.discrete = []; + end + +end + diff --git a/demos/classificationShootout.m b/demos/classificationShootout.m index 2236fe1ac..cb307ac7d 100644 --- a/demos/classificationShootout.m +++ b/demos/classificationShootout.m @@ -22,6 +22,10 @@ nDataSets = numel(dataSets); methods = {'SVM', 'RVM', 'SMLR', 'RMLR'}; +if ~svmInstalled + methods = {'SVM', 'RVM', 'SMLR', 'RMLR'}; +end + nMethods = numel(methods); results = cell(nDataSets, nMethods); for i=1:nDataSets diff --git a/demos/classificationShootoutCvLambdaOnly.m b/demos/classificationShootoutCvLambdaOnly.m index 48dee64c8..4c868c5b0 100644 --- a/demos/classificationShootoutCvLambdaOnly.m +++ b/demos/classificationShootoutCvLambdaOnly.m @@ -23,6 +23,9 @@ nDataSets = numel(dataSets); methods = {'SVM', 'RVM', 'SMLR', 'RMLR'}; +if ~svmInstalled + methods = {'SVM', 'RVM', 'SMLR', 'RMLR'}; +end nMethods = numel(methods); results = cell(nDataSets, nMethods); for i=1:nDataSets diff --git a/demos/demoMinfuncHighdim.m b/demos/demoMinfuncHighdim.m index cbb31b720..45238b652 100644 --- a/demos/demoMinfuncHighdim.m +++ b/demos/demoMinfuncHighdim.m @@ -9,23 +9,22 @@ function demoMinfuncHighdim() %methods = {'sd', 'cg', 'bb'}; methods = {'sd', 'cg', 'bb', 'newton', 'newtoncg', 'bfgs', 'lbfgs'}; %methods = {'sd', 'cg', 'bb', 'newton', 'newtoncg', 'bfgs', 'lbfgs', 'newton0', 'newton0lbfgs'}; -[styles] = plotColors(); +[styles, colors, symbols, plotstyles] = plotColors(); -for i=1:length(methods) - names{i} = methods{i}(1:min(5,length(methods{i}))) -end names = {'sd', 'cg', 'bb', 'n', 'ncg', 'bfgs', 'lbfgs'}; seed = 1; setSeed(seed); dims = [10 100 500]; +dims = [10 100]; +clear str for j=1:length(dims) d = dims(j); x0 = randn(d,1); figure; for i=1:length(methods) hold on - [fx(i,j), fcount(i,j),tim(i,j)] = helper(x0, methods{i}, styles{i}); + [fx(i,j), fcount(i,j),tim(i,j)] = helper(x0, methods{i}, plotstyles{i}); str{i} = sprintf('%s (f* = %5.2f, %d feval, %3.2f sec)', ... 
names{i}, fx(i,j), fcount(i,j), tim(i,j)); @@ -55,6 +54,7 @@ function demoMinfuncHighdim() bar(tim(:,j)) set(gca,'xticklabel',names) title(sprintf('time d=%d', d)) + drawnow end printPmtkFigure minfuncRosenBar @@ -75,7 +75,7 @@ function demoMinfuncHighdim() options.display = 'none'; options.maxFunEvals = 500; options.tolFun = 1e-2; -options.outputFn = @optimstore; +options.outputFcn = @optimstore; options.Method = method; options.HessianModify = 1; if strcmp(method, 'newtoncg') diff --git a/demos/demoRosenConstrained.m b/demos/demoRosenConstrained.m index 179489f1f..b2b19c431 100644 --- a/demos/demoRosenConstrained.m +++ b/demos/demoRosenConstrained.m @@ -9,7 +9,7 @@ function demoRosenConstrained() % minimize 2d rosenbrock st x1^2 + x^2 <= 1 % Example from p1-8 of Mathworks Optimization Toolbox manual -requiresOptimToolbox +requireOptimToolbox xstart = [-1 2]; % Hessian is ignored by quasi-Newton so we use interior point diff --git a/demos/demoRosenHighDim.m b/demos/demoRosenHighDim.m index 615e1584f..b07197227 100644 --- a/demos/demoRosenHighDim.m +++ b/demos/demoRosenHighDim.m @@ -3,14 +3,14 @@ %% %PMTKinteractive %PMTKneedsOptimToolbox + + +% This file is from pmtk3.googlecode.com + % "A note on the extended rosenbrock function" Evol. Comp. 2006 % claims that for d=4 to 30 dims there are 2 local minima, at [1,1,...1] and % and near [-1,1,...,1]. % Let us verify this for d=4 and d=5 - -% This file is from pmtk3.googlecode.com - - %xstart = [-0.77565923 0.61309337 0.38206285 0.14597202]'; xstart = [-0.96205109 0.93573953 0.88071386 0.77787813 0.60509438]'; [f g H] = rosenbrock(xstart); @@ -22,21 +22,25 @@ % So the claim seems dubious... % %% + requireOptimToolbox x = rand(10,1); [f g H] = rosenbrock(x); figure;spy(H) title(sprintf('sparsity pattern of Hessian for extended Rosenbrock')) - + printPmtkFigure rosen10dSpy + + % Now compare speed of using Hessian or approximating it -d = 200; +d = 20; % 200; seed = 0; setSeed(seed); xstart = 2*rand(d,1)-1; -opts = optimset('display', 'off', 'DerivativeCheck', 'on'); +opts = optimset('display', 'off', 'DerivativeCheck', 'off'); [f g H] = rosenbrock(xstart); +clear options options{1} = optimset(opts, 'GradObj', 'on', 'Hessian', 'on'); % analtyic Hessian options{2} = optimset(opts, 'GradObj', 'on', 'Hessian', []); % dense numerical Hessian options{3} = optimset(opts, 'GradObj', 'on', 'HessPattern', H); % sparse numerical Hessian @@ -52,5 +56,3 @@ final t - -printPmtkFigure rosen10dSpy diff --git a/demos/discrimAnalysisHeightWeightDemo.m b/demos/discrimAnalysisHeightWeightDemo.m index 3697aee76..4c8b8df9a 100644 --- a/demos/discrimAnalysisHeightWeightDemo.m +++ b/demos/discrimAnalysisHeightWeightDemo.m @@ -4,6 +4,7 @@ % This file is from pmtk3.googlecode.com +clear all rawdata = loadData('heightWeight'); data.Y = rawdata(:,1); % 1=male, 2=female data.X = [rawdata(:,2) rawdata(:,3)]; % height, weight diff --git a/demos/gpcDemo2d.m b/demos/gpcDemo2d.m index f6be37ea1..73fb3228a 100644 --- a/demos/gpcDemo2d.m +++ b/demos/gpcDemo2d.m @@ -13,6 +13,7 @@ x = [x1 x2]'; y = [repmat(-1,1,n1) repmat(1,1,n2)]'; [t1 t2] = meshgrid(-4:0.1:4,-4:0.1:4); +t = [t1(:) t2(:)]; % test % training loghyper = [0; 0]; % initial guess diff --git a/demos/hclustYeastDemo.m b/demos/hclustYeastDemo.m index a4a6c9968..06f4d6fc0 100644 --- a/demos/hclustYeastDemo.m +++ b/demos/hclustYeastDemo.m @@ -1,12 +1,11 @@ %% Hierarchical Clustering Demo %PMTKneedsStatsToobox cluster, pdist, linkage -%PMTKneedsBioToolbox clustergram %% % This file is from pmtk3.googlecode.com 
requireStatsToolbox -requireBioinfoToolbox +%requireBioinfoToolbox loadData('yeastData310') % 'X', 'genes', 'times'); corrDist = pdist(X, 'corr'); @@ -22,11 +21,12 @@ printPmtkFigure('clusterYeastHier16') +if bioinfoToolboxInstalled figure(5);clf clustergram(X(:,2:end),'RowLabels',genes, 'ColumnLabels',times(2:end)) title('hierarchical clustering') printPmtkFigure('clusterYeastRowPerm') - +end figure(6); clf dendrogram(linkage(corrDist, 'average')); diff --git a/demos/hmmNbestDemo.m b/demos/hmmNbestDemo.m index 6a3812e54..f2d847e79 100644 --- a/demos/hmmNbestDemo.m +++ b/demos/hmmNbestDemo.m @@ -3,6 +3,8 @@ % Nilsson and Goldberger, IJCAI 2001 % We do the example in sec 3.2 +%PMTKinprogress + initDist = [0.6; 0.4]; transmat = [0.6 0.4; 0.2 0.8]; obsmat = [0.9 0.1; 0.3 0.7]; diff --git a/demos/kpcaDemo2.m b/demos/kpcaDemo2.m index d8785d351..afb8908fc 100644 --- a/demos/kpcaDemo2.m +++ b/demos/kpcaDemo2.m @@ -31,16 +31,16 @@ X = patterns; % KPCA -[Zkpca, mappingKPCA] = kernel_pca(X, 2, 'gauss', rbf_var); -%Zkpca = kpcaSimple(X',2)'; % RBF kernel, sigma=1 +%[Zkpca, mappingKPCA] = kernel_pca(X, 2, 'gauss', rbf_var); +Zkpca = kpcaSimple(X',2)'; % RBF kernel, sigma=1 figure; plot(Zkpca(:,1), Zkpca(:,2), 'x', 'markersize', 10, 'linewidth', 3) title('kpca', 'fontsize', 16); printPmtkFigure('kpcaDemo2Kernel') % PCA -%[B, Zpca, evals, Xrecon, mu] = pcaPmtk(X, 2); -[Zpca, mappingPCA] = pca(X, 2); +[B, Zpca, evals, Xrecon, mu] = pcaPmtk(X, 2); +%[Zpca, mappingPCA] = pca(X, 2); figure; plot(Zpca(:,1), Zpca(:,2), 'x', 'markersize', 10, 'linewidth', 3) title('pca', 'fontsize', 16); diff --git a/demos/multilevelLinregDemo.m b/demos/multilevelLinregDemo.m index 86449a8c2..c2551ab60 100644 --- a/demos/multilevelLinregDemo.m +++ b/demos/multilevelLinregDemo.m @@ -7,7 +7,7 @@ %% read math score data plotFig = 1; %loadData('mathDataHoff', 'iszipfile', false, 'ismatfile', false, 'dataset', 'mathDataHoff'); -Y = importData('mathDataHoff.csv'); +Y = importdata('mathDataHoff.csv'); y = Y.data; names = Y.colheaders; xvals = [-3:.1:3]'; diff --git a/demos/svmBinaryClassifDemo.m b/demos/svmBinaryClassifDemo.m index 39ead5496..d57001a2c 100644 --- a/demos/svmBinaryClassifDemo.m +++ b/demos/svmBinaryClassifDemo.m @@ -2,7 +2,7 @@ % This file is from pmtk3.googlecode.com - +%PMTKbroken %% Load Data % Load synthetic data generated from a mixture of 2 Gaussians. 
Source: diff --git a/linregStudentTest b/linregStudentTest new file mode 100644 index 000000000..ce31e5053 --- /dev/null +++ b/linregStudentTest @@ -0,0 +1,9 @@ +\begin{tabular}{ccccccc} +0.291 & 0.296 & 0.863 & 2.479 & 0.038 & 0.361 & 2.793 \\ +\hline +0.250 & 1.004 & 0.219 & 3.116 & 0.016 & 0.993 & 2.556 \\ +0.059 & 2.065 & -1.631 & 5.212 & 0.008 & 2.089 & 1.888 \\ +0.068 & 2.360 & -1.900 & 5.272 & 0.006 & 2.469 & 1.481 \\ +0.042 & 2.514 & -1.937 & 5.108 & 0.005 & 2.640 & 1.257 \\ +0.181 & 0.089 & 0.818 & 2.557 & 0.278 & 0.262 & 2.832 \\ +\end{tabular} diff --git a/matlabTools/graph/mkRootedTree.m b/matlabTools/graph/mkRootedTree.m index 98159ab1a..1b37e587b 100644 --- a/matlabTools/graph/mkRootedTree.m +++ b/matlabTools/graph/mkRootedTree.m @@ -73,7 +73,7 @@ [d dt ft pred] = dfs(adjMat,root,1); %#ok (gaimc package) % dt is discovery time, pred is predecessor in search dt(root) = 0; -[~, preorder]= sort(dt); +[junk, preorder]= sort(dt); preorder = rowvec(preorder); T = sparse(Nnodes, Nnodes); diff --git a/pmtkTools/testing/runDemos.m b/pmtkTools/testing/runDemos.m index 426f5ef76..cdb8927a3 100644 --- a/pmtkTools/testing/runDemos.m +++ b/pmtkTools/testing/runDemos.m @@ -231,14 +231,6 @@ end pclear(5.000000e-01); -disp('running binaryFaDemoNewsgroups.m'); - try - binaryFaDemoNewsgroups; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running binaryFaDemoTipping.m'); try binaryFaDemoTipping; @@ -327,17 +319,17 @@ end pclear(5.000000e-01); -disp('running catFAdemoAuto.m'); +disp('running catFAdemo.m'); try - catFAdemoAuto; + catFAdemo; catch ME disp(ME.message); end pclear(5.000000e-01); -disp('running catFAtest.m'); +disp('running catFAdemoAuto.m'); try - catFAtest; + catFAdemoAuto; catch ME disp(ME.message); end @@ -383,14 +375,6 @@ end pclear(5.000000e-01); -disp('running clusterYeast.m'); - try - clusterYeast; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running coinBayesFactorDemo.m'); try coinBayesFactorDemo; @@ -1103,14 +1087,6 @@ end pclear(5.000000e-01); -disp('running getCountsMarkovLanguageEx.m'); - try - getCountsMarkovLanguageEx; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running ggmFitDemo.m'); try ggmFitDemo; @@ -1191,14 +1167,6 @@ end pclear(5.000000e-01); -disp('running gpnnCov.m'); - try - gpnnCov; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running gpnnDemo.m'); try gpnnDemo; @@ -1351,14 +1319,6 @@ end pclear(5.000000e-01); -disp('running hmmNbestDemo.m'); - try - hmmNbestDemo; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running hmmSelfLoopDist.m'); try hmmSelfLoopDist; @@ -3119,14 +3079,6 @@ end pclear(5.000000e-01); -disp('running readlines.m'); - try - readlines; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running regtreeSurfaceDemo.m'); try regtreeSurfaceDemo; @@ -3543,14 +3495,6 @@ end pclear(5.000000e-01); -disp('running stream2text.m'); - try - stream2text; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running studentFitDemo.m'); try studentFitDemo; @@ -3631,14 +3575,6 @@ end pclear(5.000000e-01); -disp('running svmBinaryClassifDemo.m'); - try - svmBinaryClassifDemo; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running svmLogregComparison.m'); try svmLogregComparison; @@ -3671,14 +3607,6 @@ end pclear(5.000000e-01); -disp('running testLanguageClassifier.m'); - try - testLanguageClassifier; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running 
testSprinklerDemo.m'); try testSprinklerDemo; @@ -3687,14 +3615,6 @@ end pclear(5.000000e-01); -disp('running text2stream.m'); - try - text2stream; - catch ME - disp(ME.message); - end - pclear(5.000000e-01); - disp('running tikhonovDemo.m'); try tikhonovDemo; diff --git a/readme.txt b/readme.txt index 00dddd570..f09727518 100644 --- a/readme.txt +++ b/readme.txt @@ -1,6 +1,12 @@ Probabilistic modeling toolkit version 3 See pmtk3.googlecode.com for details. +Steps: +1. run initPmtk3.m (takes a few minutes) +2. run testPmtk3.m (takes under a minute) +3. run runDemos.m (takes about 1 hour) + + The code relies on various external toolboxes (in addition to those in pmtksupport), listed below: diff --git a/svn-commit.2.tmp~ b/svn-commit.2.tmp~ index 61e3afae9..9355ef4e7 100644 --- a/svn-commit.2.tmp~ +++ b/svn-commit.2.tmp~ @@ -1,13 +1,4 @@ --This line, and those below, will be ignored-- -A svn-commit.tmp~ -A svn-commit.2.tmp~ -A svn-commit.tmp -M pmtkTools/testing/makeRunDemos.m -M pmtkTools/testing/runDemos.m -M demos/gaussImputationDemoEM.m -A demos/bishop-gibbs-gauss.pdf -A demos/linregStudentTest -M demos/discreteDensityModelsShootout.m -M readme.txt +D problems.txt diff --git a/toolbox/Algorithms/kernels/gpnnCov.m b/toolbox/Algorithms/kernels/gpnnCov.m new file mode 100644 index 000000000..63fb90e13 --- /dev/null +++ b/toolbox/Algorithms/kernels/gpnnCov.m @@ -0,0 +1,60 @@ +function [S,K] = nn_cov_mat3(vec); +% Compute the covariance matrix for a 1 hidden layer neural net +% with input bias variance vara, input wts variance varu, +% output weight variance varout and output bias var varb +% and # hidden units = nhid +% +% 13 May 1996, modified from nn_cov_mat.m +% +% modified from nn_cov_nat2 by removing varnoi contribution + % ckiw, 8 June 2005 + +% differs from nn_cov_mat.m in that it takes varb as well + +global XVEC XTEST; +n = length(XVEC); +ltest = length(XTEST); +S = zeros(n,n); +K = zeros(ltest,n); +%varnoi = 0.0025; % noise level used on intar2d data +%N = varnoi * eye(n); + +% parse input vars +nhid = vec(1); +varu = vec(2)*vec(2); % inputs are std's not variances +vara = vec(3)*vec(3); +varout = vec(4)*vec(4); +varb = vec(5)*vec(5); + + +% setup S and K + +f1= 2/pi; + +for i = 1:n + cii = varu*XVEC(:,i)'*XVEC(:,i) + vara; + for j = i:n + cjj = varu*XVEC(:,j)'*XVEC(:,j) + vara; + cij = varu*XVEC(:,i)'*XVEC(:,j) + vara; + den = (sqrt(1+2*cii))*(sqrt(1+2*cjj)); + S(i,j) = varb + varout*nhid*f1*asin(2*cij/den); + if (i ~= j) + S(j,i) = S(i,j); + end + end + + + for k = 1:ltest + ckk = varu*XTEST(:,k)'*XTEST(:,k) + vara; + cik = varu*XTEST(:,k)'*XVEC(:,i) + vara; + den = (sqrt(1+2*cii))*(sqrt(1+2*ckk)); + K(k,i) = varb + varout*nhid*f1*asin(2*cik/den); + end + +end % matches i = 1:n + + + + + + diff --git a/toolbox/Algorithms/optimization/optimstore.m b/toolbox/Algorithms/optimization/optimstore.m index c77684748..303ba1866 100644 --- a/toolbox/Algorithms/optimization/optimstore.m +++ b/toolbox/Algorithms/optimization/optimstore.m @@ -1,6 +1,7 @@ function stop = optimstore(x, optimValues, state) % store history of optimization vlaues + % This file is from pmtk3.googlecode.com global xhist fhist funcounthist diff --git a/toolbox/LatentVariableModels/hmm/sub/hmmFwdBackMaxProduct.m b/toolbox/LatentVariableModels/hmm/sub/hmmFwdBackMaxProduct.m index 37fdaaab5..ecd3e9d66 100644 --- a/toolbox/LatentVariableModels/hmm/sub/hmmFwdBackMaxProduct.m +++ b/toolbox/LatentVariableModels/hmm/sub/hmmFwdBackMaxProduct.m @@ -33,11 +33,13 @@ %gamma = normalize(alpha .* beta, 1);% make each column sum to 
1 +%{ for t=T-1:-1:1 b = beta(:,t+1) .* softev(:,t+1); tmpXi = transmat .* (alpha(:,t) * b'); xi_summed = xi_summed + tmpXi./sum(tmpXi(:)); % inlined call to normalize end +%} end
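Note on the readme.txt change above: the three setup steps it lists can be run directly from the MATLAB prompt. A minimal sketch, assuming pmtk3 is the current directory and the scripts are on the path under their shipped names:

% Setup sequence described in readme.txt
initPmtk3    % step 1: configure paths and support packages (takes a few minutes)
testPmtk3    % step 2: quick sanity check (takes under a minute)
runDemos     % step 3: run the full demo suite (takes about an hour)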
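Note on the new toolbox/Algorithms/kernels/gpnnCov.m above: it computes the covariance matrix of a one-hidden-layer neural network and reads its training and test inputs from the globals XVEC and XTEST (one column per case). Because the function statement inside the file is named nn_cov_mat3, it is invoked via its file name once the toolbox is on the path. A hypothetical call, with illustrative (not prescribed) inputs and hyperparameters, could look like:

% Hypothetical usage of gpnnCov.m; all values below are made up for illustration.
global XVEC XTEST
XVEC  = randn(1, 20);          % 20 one-dimensional training inputs, one per column
XTEST = linspace(-3, 3, 50);   % 50 one-dimensional test inputs, one per column
% vec = [nhid, input-weight std, input-bias std, output-weight std, output-bias std]
vec = [100, 1, 1, 1, 0.1];
[S, K] = gpnnCov(vec);         % S: 20x20 train covariance, K: 50x20 test-by-train covariance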