Skip to content

Commit

Permalink
Fixed step size for likelihood tests and allowed randomizing of Laplace
Browse files Browse the repository at this point in the history
  • Loading branch information
alansaul committed Nov 27, 2013
1 parent 0f60fba commit f59125d
Showing 1 changed file with 25 additions and 11 deletions.
36 changes: 25 additions & 11 deletions GPy/testing/likelihoods_tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
import inspect
from GPy.likelihoods.noise_models import gp_transformations
from functools import partial
#np.random.seed(300)
np.random.seed(690)

def dparam_partial(inst_func, *args):
"""
Expand Down Expand Up @@ -144,7 +146,7 @@ def constrain_bounded(regex, model, lower, upper):
"model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
"grad_params": {
"names": ["t_noise"],
"vals": [1],
"vals": [1.0],
"constraints": [constrain_positive]
},
"laplace": True
Expand All @@ -158,6 +160,15 @@ def constrain_bounded(regex, model, lower, upper):
},
"laplace": True
},
"Student_t_large_var": {
"model": GPy.likelihoods.student_t(deg_free=5, sigma2=self.var),
"grad_params": {
"names": ["t_noise"],
"vals": [10.0],
"constraints": [constrain_positive]
},
"laplace": True
},
"Student_t_approx_gauss": {
"model": GPy.likelihoods.student_t(deg_free=1000, sigma2=self.var),
"grad_params": {
Expand Down Expand Up @@ -315,9 +326,11 @@ def constrain_bounded(regex, model, lower, upper):
def t_logpdf(self, model, Y, f):
print "\n{}".format(inspect.stack()[0][3])
print model
print model._get_params()
np.testing.assert_almost_equal(
np.log(model.pdf(f.copy(), Y.copy())),
model.logpdf(f.copy(), Y.copy()))
model.pdf(f.copy(), Y.copy()),
np.exp(model.logpdf(f.copy(), Y.copy()))
)

@with_setup(setUp, tearDown)
def t_dlogpdf_df(self, model, Y, f):
Expand Down Expand Up @@ -363,7 +376,7 @@ def t_dlogpdf_dparams(self, model, Y, f, params, param_constraints):
assert (
dparam_checkgrad(model.logpdf, model.dlogpdf_dtheta,
params, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
randomize=True, verbose=True)
)

@with_setup(setUp, tearDown)
Expand All @@ -373,7 +386,7 @@ def t_dlogpdf_df_dparams(self, model, Y, f, params, param_constraints):
assert (
dparam_checkgrad(model.dlogpdf_df, model.dlogpdf_df_dtheta,
params, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
randomize=True, verbose=True)
)

@with_setup(setUp, tearDown)
Expand All @@ -383,7 +396,7 @@ def t_d2logpdf2_df2_dparams(self, model, Y, f, params, param_constraints):
assert (
dparam_checkgrad(model.d2logpdf_df2, model.d2logpdf_df2_dtheta,
params, args=(f, Y), constraints=param_constraints,
randomize=False, verbose=True)
randomize=True, verbose=True)
)

################
Expand Down Expand Up @@ -478,7 +491,7 @@ def t_laplace_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names,
print "\n{}".format(inspect.stack()[0][3])
#Normalize
Y = Y/Y.max()
white_var = 0.001
white_var = 1e-6
kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
laplace_likelihood = GPy.likelihoods.Laplace(Y.copy(), model)
m = GPy.models.GPRegression(X.copy(), Y.copy(), kernel, likelihood=laplace_likelihood)
Expand All @@ -490,12 +503,13 @@ def t_laplace_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names,
m[name] = param_vals[param_num]
constraints[param_num](name, m)

print m
m.randomize()
m.optimize(max_iters=8)
#m.optimize(max_iters=8)
print m
m.checkgrad(verbose=1, step=step)
if not m.checkgrad(step=step):
m.checkgrad(verbose=1, step=step)
#if not m.checkgrad(step=step):
#m.checkgrad(verbose=1, step=step)
#import ipdb; ipdb.set_trace()
#NOTE this test appears to be stochastic for some likelihoods (student t?)
# appears to all be working in test mode right now...
Expand All @@ -509,7 +523,7 @@ def t_ep_fit_rbf_white(self, model, X, Y, f, step, param_vals, param_names, cons
print "\n{}".format(inspect.stack()[0][3])
#Normalize
Y = Y/Y.max()
white_var = 0.001
white_var = 1e-6
kernel = GPy.kern.rbf(X.shape[1]) + GPy.kern.white(X.shape[1])
ep_likelihood = GPy.likelihoods.EP(Y.copy(), model)
m = GPy.models.GPRegression(X.copy(), Y.copy(), kernel, likelihood=ep_likelihood)
Expand Down

0 comments on commit f59125d

Please sign in to comment.