diff --git a/CHANGELOG.md b/CHANGELOG.md index 95a8faf46..796981190 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,18 +15,18 @@ fine grained control over the iterations. * Parallelize L-BFGS minimization runs over the acquisition function. * Implement weighted hamming distance kernel for problems with only categorical dimensions. * New acquisition function `gp_hedge` that probabilistically chooses one of `EI`, `PI` - or `LCB` at every iteration depending upon the cumulative gain. +or `LCB` at every iteration depending upon the cumulative gain. ### Bug fixes * Warnings are now raised if a point is chosen as the candidate optimum multiple - times. +times. * Infinite gradients that were raised in the kernel gradient computation are - now fixed. +now fixed. * Integer dimensions are now normalized to [0, 1] internally in `gp_minimize`. ### API Changes. * The default `acq_optimizer` function has changed from `"auto"` to `"lbfgs"` - in `gp_minimize`. +in `gp_minimize`. ## Version 0.2 diff --git a/setup.py b/setup.py index efec21f2f..5312f3f6d 100644 --- a/setup.py +++ b/setup.py @@ -4,12 +4,12 @@ from distutils.core import setup setup(name='scikit-optimize', - version='0.2', + version='0.3', description='Sequential model-based optimization toolbox.', - long_description=('Scikit-Optimize, or skopt, is a simple and efficient' ' library for sequential model-based optimization,' ' accessible to everybody and reusable in various' ' contexts.'), + long_description=('Scikit-Optimize, or `skopt`, is a simple and efficient' ' library to minimize (very) expensive and noisy' ' black-box functions. It implements several methods' ' for sequential model-based optimization.'), url='https://scikit-optimize.github.io/', license='BSD', author='The scikit-optimize contributors', diff --git a/skopt/__init__.py b/skopt/__init__.py index db3e5be5f..88f05c6dd 100644 --- a/skopt/__init__.py +++ b/skopt/__init__.py @@ -61,7 +61,7 @@ def f(x): from .utils import load, dump -__version__ = "0.2" +__version__ = "0.3" __all__ = (