
Merge pull request scipy#5479 from samvrlewis/master
ENH: return Jacobian/Hessian from BasinHopping
dlax committed Nov 24, 2015
2 parents dd79682 + 6f1a2ba commit cce8e3e
Showing 3 changed files with 76 additions and 36 deletions.
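In effect, the full ``OptimizeResult`` produced by the local minimizer at the lowest minimum found is attached to the basinhopping result as ``lowest_optimization_result``, which is how the Jacobian (and, with BFGS, the inverse-Hessian estimate) become available to callers. A minimal usage sketch, assuming a SciPy build that includes this commit; the quadratic objective here is an arbitrary example, not part of the change:

    import numpy as np
    from scipy.optimize import basinhopping

    def f(x):
        # convex quadratic with its analytic gradient
        grad = np.array([4.0*x[0] + 2.0*x[1] - 6.0, 2.0*x[0] + 4.0*x[1]])
        return 2.0*x[0]**2 + 2.0*x[0]*x[1] + 2.0*x[1]**2 - 6.0*x[0], grad

    res = basinhopping(f, [0.0, 0.0], niter=10,
                       minimizer_kwargs={"method": "BFGS", "jac": True})

    # the minimizer's full result at the best minimum is now attached
    print(res.lowest_optimization_result.jac)       # gradient at the solution
    print(res.lowest_optimization_result.hess_inv)  # BFGS inverse-Hessian estimate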
1 change: 1 addition & 0 deletions THANKS.txt
@@ -153,6 +153,7 @@ Will Monroe for bug fixes.
 Bernardo Sulzbach for bug fixes.
 Alexander Grigorevskiy for adding extra LAPACK least-square solvers and
 modifying linalg.lstsq function accordingly.
+Sam Lewis for enhancements to the basinhopping module.
 
 Institutions
 ------------
52 changes: 28 additions & 24 deletions scipy/optimize/_basinhopping.py
@@ -15,22 +15,22 @@ class Storage(object):
     """
     Class used to store the lowest energy structure
     """
-    def __init__(self, x, f):
-        self._add(x, f)
+    def __init__(self, minres):
+        self._add(minres)
 
-    def _add(self, x, f):
-        self.x = np.copy(x)
-        self.f = f
+    def _add(self, minres):
+        self.minres = minres
+        self.minres.x = np.copy(minres.x)
 
-    def update(self, x, f):
-        if f < self.f:
-            self._add(x, f)
+    def update(self, minres):
+        if minres.fun < self.minres.fun:
+            self._add(minres)
             return True
         else:
             return False
 
     def get_lowest(self):
-        return self.x, self.f
+        return self.minres
 
 
 class BasinHoppingRunner(object):
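The refactored ``Storage`` keeps the entire ``OptimizeResult`` from the local minimizer instead of a bare ``(x, f)`` pair; ``update`` replaces the stored result only when the candidate's ``fun`` is lower. A small illustration of this internal API (``Storage`` is private to ``scipy.optimize._basinhopping``, as the tests below also assume):

    from scipy.optimize import OptimizeResult
    from scipy.optimize._basinhopping import Storage

    storage = Storage(OptimizeResult(x=[0.0], fun=1.0))

    # a higher-energy minimum is rejected, a lower-energy one is stored
    storage.update(OptimizeResult(x=[0.5], fun=2.0))   # returns False
    storage.update(OptimizeResult(x=[1.0], fun=-1.0))  # returns True
    print(storage.get_lowest().fun)                    # -1.0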
@@ -80,7 +80,7 @@ def __init__(self, x0, minimizer, step_taking, accept_tests, disp=False):
             print("basinhopping step %d: f %g" % (self.nstep, self.energy))
 
         # initialize storage class
-        self.storage = Storage(self.x, self.energy)
+        self.storage = Storage(minres)
 
         if hasattr(minres, "nfev"):
             self.res.nfev = minres.nfev
@@ -108,6 +108,7 @@ def _monte_carlo_step(self):
             self.res.minimization_failures += 1
             if self.disp:
                 print("warning: basinhopping: local minimization failure")
+
         if hasattr(minres, "nfev"):
             self.res.nfev += minres.nfev
         if hasattr(minres, "njev"):
@@ -145,41 +146,41 @@ def _monte_carlo_step(self):
                            x_new=x_after_quench, f_old=self.energy,
                            x_old=self.x)
 
-        return x_after_quench, energy_after_quench, accept
+        return accept, minres
 
     def one_cycle(self):
         """Do one cycle of the basinhopping algorithm
         """
         self.nstep += 1
         new_global_min = False
 
-        xtrial, energy_trial, accept = self._monte_carlo_step()
+        accept, minres = self._monte_carlo_step()
 
         if accept:
-            self.energy = energy_trial
-            self.x = np.copy(xtrial)
-            new_global_min = self.storage.update(self.x, self.energy)
+            self.energy = minres.fun
+            self.x = np.copy(minres.x)
+            new_global_min = self.storage.update(minres)
 
         # print some information
         if self.disp:
-            self.print_report(energy_trial, accept)
+            self.print_report(minres.fun, accept)
             if new_global_min:
                 print("found new global minimum on step %d with function"
                       " value %g" % (self.nstep, self.energy))
 
         # save some variables as BasinHoppingRunner attributes
-        self.xtrial = xtrial
-        self.energy_trial = energy_trial
+        self.xtrial = minres.x
+        self.energy_trial = minres.fun
         self.accept = accept
 
         return new_global_min
 
     def print_report(self, energy_trial, accept):
         """print a status update"""
-        xlowest, energy_lowest = self.storage.get_lowest()
+        minres = self.storage.get_lowest()
         print("basinhopping step %d: f %g trial_f %g accepted %d "
               " lowest_f %g" % (self.nstep, self.energy, energy_trial,
-                                accept, energy_lowest))
+                                accept, minres.fun))
 
 
 class AdaptiveStepsize(object):
@@ -370,7 +371,10 @@ def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
         The optimization result represented as a ``OptimizeResult`` object. Important
         attributes are: ``x`` the solution array, ``fun`` the value of the
         function at the solution, and ``message`` which describes the cause of
-        the termination. See `OptimizeResult` for a description of other attributes.
+        the termination. The ``OptimizeResult`` object returned by the selected
+        minimizer at the lowest minimum is also contained within this object
+        and can be accessed through the ``lowest_optimization_result`` attribute.
+        See `OptimizeResult` for a description of other attributes.
 
     See Also
     --------
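Since the top-level ``res.x`` and ``res.fun`` are copied from this stored minimizer result (see the return-object assembly in the next hunk), the two views stay consistent; a quick check, assuming this commit:

    import numpy as np
    from scipy.optimize import basinhopping

    res = basinhopping(lambda x: (x[0] - 2.0)**2, [0.0], niter=5)
    assert np.allclose(res.x, res.lowest_optimization_result.x)
    assert res.fun == res.lowest_optimization_result.fun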
@@ -630,10 +634,10 @@ def basinhopping(func, x0, niter=100, T=1.0, stepsize=0.5,
             break
 
     # prepare return object
-    lowest = bh.storage.get_lowest()
     res = bh.res
-    res.x = np.copy(lowest[0])
-    res.fun = lowest[1]
+    res.lowest_optimization_result = bh.storage.get_lowest()
+    res.x = np.copy(res.lowest_optimization_result.x)
+    res.fun = res.lowest_optimization_result.fun
     res.message = message
     res.nit = i + 1
     return res
59 changes: 47 additions & 12 deletions scipy/optimize/tests/test__basinhopping.py
@@ -9,7 +9,7 @@
 import numpy as np
 from numpy import cos, sin
 
-from scipy.optimize import basinhopping
+from scipy.optimize import (basinhopping, OptimizeResult)
 from scipy.optimize._basinhopping import (
     Storage, RandomDisplacement, Metropolis, AdaptiveStepsize)
 
@@ -38,6 +38,13 @@ def func2d(x):
     df[1] = 2. * x[1] + 0.2
     return f, df
 
+def func2d_easyderiv(x):
+    f = 2.0*x[0]**2 + 2.0*x[0]*x[1] + 2.0*x[1]**2 - 6.0*x[0]
+    df = np.zeros(2)
+    df[0] = 4.0*x[0] + 2.0*x[1] - 6.0
+    df[1] = 2.0*x[0] + 4.0*x[1]
+
+    return f, df
 
 class MyTakeStep1(RandomDisplacement):
     """use a copy of displace, but have it set a special parameter to
@@ -108,8 +115,7 @@ def setUp(self):
         """
         self.x0 = (1.0, [1.0, 1.0])
         self.sol = (-0.195, np.array([-0.195, -0.1]))
-        self.upper = (3., [3., 3.])
-        self.lower = (-3., [-3., -3.])
+
         self.tol = 3  # number of decimal places
 
         self.niter = 100
@@ -171,6 +177,22 @@ def test_njev(self):
         assert_(res.nfev > 0)
         assert_equal(res.nfev, res.njev)
 
+    def test_jac(self):
+        # test Jacobian returned
+        minimizer_kwargs = self.kwargs.copy()
+        # BFGS returns a Jacobian
+        minimizer_kwargs["method"] = "BFGS"
+
+        res = basinhopping(func2d_easyderiv, [0.0, 0.0],
+                           minimizer_kwargs=minimizer_kwargs,
+                           niter=self.niter, disp=self.disp)
+
+        assert_(hasattr(res.lowest_optimization_result, "jac"))
+
+        # in this case the Jacobian is just [df/dx, df/dy]
+        _, jacobian = func2d_easyderiv(res.x)
+        assert_almost_equal(res.lowest_optimization_result.jac, jacobian, self.tol)
+
     def test_2d_nograd(self):
         # test 2d minimizations without gradient
         i = 1
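The test pins ``method`` to BFGS because BFGS always reports a Jacobian in its result; other gradient-based minimizers such as L-BFGS-B also populate ``jac``, so the attribute should propagate the same way. A hedged sketch, not part of the test suite, reusing ``func2d_easyderiv`` from this module:

    from scipy.optimize import basinhopping

    res = basinhopping(func2d_easyderiv, [0.0, 0.0], niter=10,
                       minimizer_kwargs={"method": "L-BFGS-B", "jac": True})
    print(res.lowest_optimization_result.jac)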
@@ -273,20 +295,33 @@ class Test_Storage(TestCase):
     def setUp(self):
         self.x0 = np.array(1)
         self.f0 = 0
-        self.storage = Storage(self.x0, self.f0)
+
+        minres = OptimizeResult()
+        minres.x = self.x0
+        minres.fun = self.f0
+
+        self.storage = Storage(minres)
 
     def test_higher_f_rejected(self):
-        ret = self.storage.update(self.x0 + 1, self.f0 + 1)
-        x, f = self.storage.get_lowest()
-        assert_equal(self.x0, x)
-        assert_equal(self.f0, f)
+        new_minres = OptimizeResult()
+        new_minres.x = self.x0 + 1
+        new_minres.fun = self.f0 + 1
+
+        ret = self.storage.update(new_minres)
+        minres = self.storage.get_lowest()
+        assert_equal(self.x0, minres.x)
+        assert_equal(self.f0, minres.fun)
         assert_(not ret)
 
     def test_lower_f_accepted(self):
-        ret = self.storage.update(self.x0 + 1, self.f0 - 1)
-        x, f = self.storage.get_lowest()
-        assert_(self.x0 != x)
-        assert_(self.f0 != f)
+        new_minres = OptimizeResult()
+        new_minres.x = self.x0 + 1
+        new_minres.fun = self.f0 - 1
+
+        ret = self.storage.update(new_minres)
+        minres = self.storage.get_lowest()
+        assert_(self.x0 != minres.x)
+        assert_(self.f0 != minres.fun)
         assert_(ret)
