Commit

Merge branch 'devel' of github.com:SheffieldML/GPy into devel
alansaul committed Oct 4, 2015
2 parents 2dc1b14 + e6261c7 commit e1b1faa
Showing 27 changed files with 328 additions and 91 deletions.
66 changes: 45 additions & 21 deletions .travis.yml
@@ -1,10 +1,10 @@
sudo: false

os:
- osx
- linux
# - osx

language: python
#language: python

#addons:
# apt:
@@ -14,28 +14,52 @@ language: python
# - libatlas-base-dev
# - liblapack-dev

python:
- 2.7
- 3.3
- 3.4
cache:
directories:
- $HOME/download/
- $HOME/install/

env:
- PYTHON_VERSION=2.7
- PYTHON_VERSION=3.5

before_install:
- wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
- chmod +x miniconda.sh
- ./miniconda.sh -b
- export PATH=/home/travis/miniconda/bin:$PATH
# - conda update --yes conda
- export CONDA_CACHED=1
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then
export OS=Linux;
elif [[ "$TRAVIS_OS_NAME" == "osx" ]]; then
export OS=MacOSX;
else
echo "OS not supported yet";
exit 1;
fi;
- if [[ $PYTHON_VERSION == "2.7" ]]; then
export MINICONDA=Miniconda;
elif [[ $PYTHON_VERSION == 3* ]]; then
export MINICONDA=Miniconda3;
else
echo "Could not find python version";
exit 1;
fi;
- if [ ! -d $HOME/download/ ]; then mkdir $HOME/download/; fi;
- if [ ! -d $HOME/install/ ]; then mkdir $HOME/install/; fi;
- export MINICONDA_FILE=$MINICONDA-latest-$OS-x86_64-$PYTHON_VERSION
- export MINICONDA_CACHE_FILE=$HOME/download/$MINICONDA_FILE.sh
- export MINICONDA_INSTALL=$HOME/install/$MINICONDA_FILE
- if [ ! -f $MINICONDA_CACHE_FILE ]; then
export CONDA_CACHED=0;
wget http://repo.continuum.io/miniconda/$MINICONDA-latest-$OS-x86_64.sh -O $MINICONDA_CACHE_FILE;
bash $MINICONDA_CACHE_FILE -b -p $MINICONDA_INSTALL;
fi;
- export PATH="$MINICONDA_INSTALL/bin:$PATH";

install:
- conda install --yes python=$TRAVIS_PYTHON_VERSION numpy=1.9 scipy=0.16 nose pip six
- pip install .

- conda install --yes python=$PYTHON_VERSION numpy=1.9 scipy=0.16 nose pip six matplotlib;
- pip install codecov
- python setup.py develop

script:
- cd $HOME
- mkdir empty
- cd empty
- nosetests GPy.testing
- coverage run travis_tests.py

cache:
directories:
- $HOME/.cache/pip
after_success:
- codecov
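
Note: the new before_install caches the Miniconda installer and install tree under $HOME/download/ and $HOME/install/ between builds. A minimal Python sketch of the same cache-miss pattern (hypothetical paths and helper, standard library only; the real logic runs as shell commands under Travis):

import os
import urllib.request

def fetch_installer(cache_dir, installer_name):
    """Download the Miniconda installer only when the cache misses."""
    os.makedirs(cache_dir, exist_ok=True)
    target = os.path.join(cache_dir, installer_name + '.sh')
    if not os.path.exists(target):  # the CONDA_CACHED=0 branch above
        url = 'http://repo.continuum.io/miniconda/' + installer_name + '.sh'
        urllib.request.urlretrieve(url, target)
    return target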
20 changes: 13 additions & 7 deletions GPy/__init__.py
@@ -34,18 +34,24 @@ def tests(verbose=10):
def tests(verbose=10):
Tester(testing).test(verbose=verbose)

def load(file_path):
def load(file_or_path):
"""
Load a previously pickled model, using `m.pickle('path/to/file.pickle)'
:param file_name: path/to/file.pickle
"""
import cPickle as pickle
try:
with open(file_path, 'rb') as f:
m = pickle.load(f)
import cPickle as pickle
if isinstance(file_or_path, basestring):
with open(file_or_path, 'rb') as f:
m = pickle.load(f)
else:
m = pickle.load(file_or_path)
except:
import pickle as pickle
with open(file_path, 'rb') as f:
m = pickle.load(f)
import pickle
if isinstance(file_or_path, str):  # basestring does not exist on Python 3
with open(file_or_path, 'rb') as f:
m = pickle.load(f)
else:
m = pickle.load(file_or_path)
return m
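
Note: load now accepts either a path or an already-open file object. A short usage sketch (assumes a model previously saved with m.pickle('model.pickle')):

import GPy

m1 = GPy.load('model.pickle')           # from a path, as before
with open('model.pickle', 'rb') as f:   # new: from an open file object
    m2 = GPy.load(f)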
12 changes: 6 additions & 6 deletions GPy/core/model.py
@@ -368,19 +368,19 @@ def _checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3
for nind, xind in zip(param_index, transformed_index):
xx = x.copy()
xx[xind] += step
f1 = self._objective(xx)
f1 = float(self._objective(xx))
xx[xind] -= 2.*step
f2 = self._objective(xx)
f2 = float(self._objective(xx))
#Avoid divide by zero if any of the values are above 1e-15; otherwise both values are essentially
#the same
if f1 > 1e-15 or f1 < -1e-15 or f2 > 1e-15 or f2 < -1e-15:
df_ratio = np.abs((f1 - f2) / min(f1, f2))
else:
df_ratio = 1.0
df_unstable = df_ratio < df_tolerance
numerical_gradient = (f1 - f2) / (2 * step)
numerical_gradient = (f1 - f2) / (2. * step)
if np.all(gradient[xind] == 0): ratio = (f1 - f2) == gradient[xind]
else: ratio = (f1 - f2) / (2 * step * gradient[xind])
else: ratio = (f1 - f2) / (2. * step * gradient[xind])
difference = np.abs(numerical_gradient - gradient[xind])

if (np.abs(1. - ratio) < tolerance) or np.abs(difference) < tolerance:
@@ -422,7 +422,7 @@ def _repr_html_(self):
to_print.append(super(Model, self)._repr_html_())
return "\n".join(to_print)

def __str__(self):
def __str__(self, VT100=True):
model_details = [['Name', self.name],
['Log-likelihood', '{}'.format(float(self.log_likelihood()))],
["Number of Parameters", '{}'.format(self.size)],
@@ -432,6 +432,6 @@ def __str__(self):
from operator import itemgetter
max_len = reduce(lambda a, b: max(len(b[0]), a), model_details, 0)
to_print = [""] + ["{0:{l}} : {1}".format(name, detail, l=max_len) for name, detail in model_details] + ["Parameters:"]
to_print.append(super(Model, self).__str__())
to_print.append(super(Model, self).__str__(VT100=VT100))
return "\n".join(to_print)

2 changes: 1 addition & 1 deletion GPy/core/parameterization/parameter_core.py
@@ -227,8 +227,8 @@ class Nameable(Gradcheckable):
Make an object nameable inside the hierarchy.
"""
def __init__(self, name, *a, **kw):
super(Nameable, self).__init__(*a, **kw)
self._name = name or self.__class__.__name__
super(Nameable, self).__init__(*a, **kw)

@property
def name(self):
7 changes: 5 additions & 2 deletions GPy/core/parameterization/parameterized.py
@@ -405,7 +405,7 @@ def _repr_html_(self, header=True):
</style>"""
return style + '\n' + '<table class="tg">' + '\n'.format(sep).join(to_print) + '\n</table>'

def __str__(self, header=True):
def __str__(self, header=True, VT100=True):
name = adjust_name_for_printing(self.name) + "."
constrs = self._constraints_str;
ts = self._ties_str
@@ -416,7 +416,10 @@ def __str__(self, header=True):
cl = max([len(str(x)) if x else 0 for x in constrs + ["Constraint"]])
tl = max([len(str(x)) if x else 0 for x in ts + ["Tied to"]])
pl = max([len(str(x)) if x else 0 for x in prirs + ["Prior"]])
format_spec = " \033[1m{{name:<{0}s}}\033[0;0m | {{desc:>{1}s}} | {{const:^{2}s}} | {{pri:^{3}s}} | {{t:^{4}s}}".format(nl, sl, cl, pl, tl)
if VT100:
format_spec = " \033[1m{{name:<{0}s}}\033[0;0m | {{desc:>{1}s}} | {{const:^{2}s}} | {{pri:^{3}s}} | {{t:^{4}s}}".format(nl, sl, cl, pl, tl)
else:
format_spec = " {{name:<{0}s}} | {{desc:>{1}s}} | {{const:^{2}s}} | {{pri:^{3}s}} | {{t:^{4}s}}".format(nl, sl, cl, pl, tl)
to_print = []
for n, d, c, t, p in zip(names, desc, constrs, ts, prirs):
to_print.append(format_spec.format(name=n, desc=d, const=c, t=t, pri=p))
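
Note: together with the Model.__str__ change above, the VT100 flag lets callers drop the ANSI bold escapes when output is not going to a terminal. A usage sketch (assuming m is any GPy model):

print(m)                       # VT100=True: bold parameter names in a terminal
print(m.__str__(VT100=False))  # plain text, safe for logs and files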
3 changes: 2 additions & 1 deletion GPy/core/sparse_gp.py
@@ -42,10 +42,11 @@ class SparseGP(GP):

def __init__(self, X, Y, Z, kernel, likelihood, mean_function=None, X_variance=None, inference_method=None,
name='sparse gp', Y_metadata=None, normalizer=False):

#pick a sensible inference method
if inference_method is None:
if isinstance(likelihood, likelihoods.Gaussian):
inference_method = var_dtc.VarDTC(limit=1 if not self.missing_data else Y.shape[1])
inference_method = var_dtc.VarDTC(limit=1)
else:
#inference_method = ??
raise NotImplementedError("what to do what to do?")
28 changes: 28 additions & 0 deletions GPy/inference/optimization/optimization.py
@@ -143,6 +143,33 @@ def opt(self, f_fp=None, f=None, fp=None):
#a more helpful error message is available in opt_result in the Error case
if opt_result[2]['warnflag']==2:
self.status = 'Error' + str(opt_result[2]['task'])

class opt_bfgs(Optimizer):
def __init__(self, *args, **kwargs):
Optimizer.__init__(self, *args, **kwargs)
self.opt_name = "BFGS (Scipy implementation)"

def opt(self, f_fp=None, f=None, fp=None):
"""
Run the optimizer
"""
rcstrings = ['','Maximum number of iterations exceeded', 'Gradient and/or function calls not changing']

opt_dict = {}
if self.xtol is not None:
print("WARNING: bfgs doesn't have an xtol arg, so I'm going to ignore it")
if self.ftol is not None:
print("WARNING: bfgs doesn't have an ftol arg, so I'm going to ignore it")
if self.gtol is not None:
opt_dict['gtol'] = self.gtol  # fmin_bfgs takes gtol; pgtol is an l_bfgs_b argument

opt_result = optimize.fmin_bfgs(f, self.x_init, fp, disp=self.messages,
maxiter=self.max_iters, full_output=True, **opt_dict)
self.x_opt = opt_result[0]
self.f_opt = f_fp(self.x_opt)[0]
self.funct_eval = opt_result[4]
self.status = rcstrings[opt_result[6]]

class opt_simplex(Optimizer):
def __init__(self, *args, **kwargs):
@@ -255,6 +282,7 @@ def get_optimizer(f_min):
optimizers = {'fmin_tnc': opt_tnc,
'simplex': opt_simplex,
'lbfgsb': opt_lbfgsb,
'org-bfgs': opt_bfgs,
'scg': opt_SCG,
'adadelta':Opt_Adadelta}

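
Note: the new opt_bfgs wrapper is registered under the key 'org-bfgs'. A usage sketch (assuming m is any GPy model):

m.optimize('org-bfgs', max_iters=200, messages=True)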
6 changes: 3 additions & 3 deletions GPy/kern/_src/psi_comp/gaussherm.py
@@ -32,7 +32,7 @@ def comp_K(self, Z, qX):
self.Xs = ObsAr(np.empty((self.degree,)+qX.mean.shape))
mu, S = qX.mean.values, qX.variance.values
S_sq = np.sqrt(S)
for i in xrange(self.degree):
for i in range(self.degree):
self.Xs[i] = self.locs[i]*S_sq+mu
return self.Xs

@@ -46,7 +46,7 @@ def psicomputations(self, kern, Z, qX, return_psi2_n=False):
psi0 = np.zeros((N,))
psi1 = np.zeros((N,M))
psi2 = np.zeros((N,M,M)) if return_psi2_n else np.zeros((M,M))
for i in xrange(self.degree):
for i in range(self.degree):
if self.cache_K:
X = Xs[i]
else:
@@ -74,7 +74,7 @@ def psiDerivativecomputations(self, kern, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, qX):
dZ = np.zeros_like(Z)
dmu = np.zeros_like(mu)
dS = np.zeros_like(S)
for i in xrange(self.degree):
for i in range(self.degree):
if self.cache_K:
X = Xs[i]
else:
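
Note: comp_K above builds the quadrature inputs as Xs[i] = locs[i]*sqrt(S) + mu. A self-contained sketch of that substitution for a 1-D Gaussian expectation (assumes probabilists' Gauss-Hermite nodes, i.e. weight function exp(-x**2/2)):

import numpy as np

degree = 20
locs, w = np.polynomial.hermite_e.hermegauss(degree)  # nodes and weights
w = w / w.sum()                    # normalise; raw weights sum to sqrt(2*pi)

mu, S = 0.5, 2.0                   # q(X) = N(mu, S)
X = locs * np.sqrt(S) + mu         # the same substitution as comp_K
approx = (w * np.exp(-X**2)).sum() # approximates E[exp(-X**2)] under q(X)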
24 changes: 22 additions & 2 deletions GPy/kern/_src/rbf.py
@@ -7,6 +7,8 @@
from .psi_comp import PSICOMP_RBF
from .psi_comp.rbf_psi_gpucomp import PSICOMP_RBF_GPU
from ...util.config import *
from ...core import Param
from GPy.core.parameterization.transformations import Logexp

class RBF(Stationary):
"""
@@ -18,12 +20,17 @@ class RBF(Stationary):
"""
_support_GPU = True
def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='rbf', useGPU=False):
def __init__(self, input_dim, variance=1., lengthscale=None, ARD=False, active_dims=None, name='rbf', useGPU=False, inv_l=False):
super(RBF, self).__init__(input_dim, variance, lengthscale, ARD, active_dims, name, useGPU=useGPU)
if self.useGPU:
self.psicomp = PSICOMP_RBF_GPU()
else:
self.psicomp = PSICOMP_RBF()
self.use_invLengthscale = inv_l
if inv_l:
self.unlink_parameter(self.lengthscale)
self.inv_l = Param('inv_lengthscale',1./self.lengthscale**2, Logexp())
self.link_parameter(self.inv_l)

def K_of_r(self, r):
return self.variance * np.exp(-0.5 * r**2)
@@ -47,6 +54,10 @@ def __setstate__(self, state):
def spectrum(self, omega):
assert self.input_dim == 1 #TODO: higher dim spectra?
return self.variance*np.sqrt(2*np.pi)*self.lengthscale*np.exp(-self.lengthscale**2*omega**2/2)

def parameters_changed(self):
if self.use_invLengthscale: self.lengthscale[:] = 1./np.sqrt(self.inv_l+1e-200)
super(RBF,self).parameters_changed()

#---------------------------------------#
# PSI statistics #
@@ -68,10 +79,19 @@ def update_gradients_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
dL_dvar, dL_dlengscale = self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[:2]
self.variance.gradient = dL_dvar
self.lengthscale.gradient = dL_dlengscale
if self.use_invLengthscale:
self.inv_l.gradient = dL_dlengscale*(self.lengthscale**3/-2.)

def gradients_Z_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[2]

def gradients_qX_expectations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior):
return self.psicomp.psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, Z, variational_posterior)[3:]


def update_gradients_diag(self, dL_dKdiag, X):
super(RBF,self).update_gradients_diag(dL_dKdiag, X)
if self.use_invLengthscale: self.inv_l.gradient =self.lengthscale.gradient*(self.lengthscale**3/-2.)

def update_gradients_full(self, dL_dK, X, X2=None):
super(RBF,self).update_gradients_full(dL_dK, X, X2)
if self.use_invLengthscale: self.inv_l.gradient =self.lengthscale.gradient*(self.lengthscale**3/-2.)
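
Note: the lengthscale**3/-2. factor is the chain rule for the reparameterisation inv_l = 1/lengthscale**2: with l = inv_l**-0.5, dl/d(inv_l) = -0.5*inv_l**-1.5 = -(l**3)/2. A usage sketch:

import GPy

k = GPy.kern.RBF(input_dim=1, inv_l=True)  # optimise 1/lengthscale**2 instead
print(k.inv_l)                             # kept positive by the Logexp transform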
2 changes: 1 addition & 1 deletion GPy/mappings/linear.py
@@ -25,7 +25,7 @@ class Linear(Mapping):
"""

def __init__(self, input_dim, output_dim, name='linmap'):
Mapping.__init__(self, input_dim=input_dim, output_dim=output_dim, name=name)
super(Linear, self).__init__(input_dim=input_dim, output_dim=output_dim, name=name)
self.A = Param('A', np.random.randn(self.input_dim, self.output_dim))
self.link_parameter(self.A)

4 changes: 2 additions & 2 deletions GPy/models/gp_classification.py
@@ -20,10 +20,10 @@ class GPClassification(GP):
"""

def __init__(self, X, Y, kernel=None,Y_metadata=None):
def __init__(self, X, Y, kernel=None,Y_metadata=None, mean_function=None):
if kernel is None:
kernel = kern.RBF(X.shape[1])

likelihood = likelihoods.Bernoulli()

GP.__init__(self, X=X, Y=Y, kernel=kernel, likelihood=likelihood, inference_method=EP(), name='gp_classification')
GP.__init__(self, X=X, Y=Y, kernel=kernel, likelihood=likelihood, inference_method=EP(), mean_function=mean_function, name='gp_classification')
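
Note: GPClassification now forwards an optional mean function to GP. A short sketch on synthetic data, using a GPy.mappings.Linear mean as one possible choice:

import numpy as np
import GPy

X = np.random.rand(60, 1)
Y = (X > 0.5).astype(float)      # binary labels in {0, 1}
mf = GPy.mappings.Linear(input_dim=1, output_dim=1)
m = GPy.models.GPClassification(X, Y, mean_function=mf)
m.optimize()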
2 changes: 1 addition & 1 deletion GPy/models/gradient_checker.py
@@ -361,7 +361,7 @@ def checkgrad(self, target_param=None, verbose=False, step=1e-6, tolerance=1e-3,

#Check every block individually (for ease)
check_passed = [False]*numeric_hess.shape[2]
for block_ind in xrange(numeric_hess.shape[2]):
for block_ind in range(numeric_hess.shape[2]):
#Unless super_plot is set, just plot the first one
p = True if (plot and block_ind == numeric_hess.shape[2]-1) or super_plot else False
if verbose: