From fd5784f565cd790ea0940bc433db8b0ed62239d0 Mon Sep 17 00:00:00 2001
From: eggenspk
Date: Fri, 16 Jul 2021 15:27:22 +0200
Subject: [PATCH] Rename MF facade

---
 README.md                                     |  4 +--
 examples/{BOHB4HPO_mlp.py => SMAC4MF_mlp.py}  | 14 ++++-----
 ..._instances.py => SMAC4MF_sgd_instances.py} | 31 ++++++++++---------
 ...{smac_bohb_facade.py => smac_mf_facade.py} |  7 +++--
 smac/intensification/successive_halving.py    |  9 ++++--
 ...b4hpo_facade.py => test_smac4mf_facade.py} |  6 ++--
 6 files changed, 38 insertions(+), 33 deletions(-)
 rename examples/{BOHB4HPO_mlp.py => SMAC4MF_mlp.py} (94%)
 rename examples/{BOHB4HPO_sgd_instances.py => SMAC4MF_sgd_instances.py} (83%)
 rename smac/facade/{smac_bohb_facade.py => smac_mf_facade.py} (91%)
 rename test/test_facade/{test_bohb4hpo_facade.py => test_smac4mf_facade.py} (89%)

diff --git a/README.md b/README.md
index 68e9b4aa3..6dae84967 100644
--- a/README.md
+++ b/README.md
@@ -143,8 +143,8 @@ We provide a bunch of examples in the [examples folder](examples), such as:
 * Optimization of an MLP
   * [parallel_sh_mlp.py](https://automl.github.io/SMAC3/master/examples/parallel_sh_mlp.html#sphx-glr-examples-parallel-sh-mlp-py) - Parallel Successive Halving
   * [hyperband_mlp.py](https://automl.github.io/SMAC3/master/examples/hyperband_mlp.html#sphx-glr-examples-hyperband-mlp-py) - Hyperband
-  * [BOHB4HPO_mlp.py](https://automl.github.io/SMAC3/master/examples/BOHB4HPO_mlp.html#sphx-glr-examples-bohb4hpo-mlp-py) - BOHB
-  * [BOHB4HPO_sgd_instances.py](https://automl.github.io/SMAC3/master/examples/BOHB4HPO_sgd_instances.html#sphx-glr-examples-bohb4hpo-sgd-instances-py) - BOHB across instances
+  * [SMAC4MF_mlp.py](https://automl.github.io/SMAC3/master/examples/SMAC4MF_mlp.html#sphx-glr-examples-smac4mf-mlp-py) - SMAC4MF
+  * [SMAC4MF_sgd_instances.py](https://automl.github.io/SMAC3/master/examples/SMAC4MF_sgd_instances.html#sphx-glr-examples-smac4mf-sgd-instances-py) - SMAC4MF across instances
 
 An overview of all examples can be seen in our [documentation](https://automl.github.io/SMAC3/master/examples/index.html).
 
diff --git a/examples/BOHB4HPO_mlp.py b/examples/SMAC4MF_mlp.py
similarity index 94%
rename from examples/BOHB4HPO_mlp.py
rename to examples/SMAC4MF_mlp.py
index 65197b8f3..267d27028 100644
--- a/examples/BOHB4HPO_mlp.py
+++ b/examples/SMAC4MF_mlp.py
@@ -1,7 +1,7 @@
 """
-===========================
-Optimizing an MLP with BOHB
-===========================
+==============================
+Optimizing an MLP with SMAC4MF
+==============================
 An example for the usage of Hyperband intensifier in SMAC.
 We optimize a simple MLP on the digits dataset using "Hyperband" intensification.
 
@@ -23,7 +23,7 @@
 from sklearn.neural_network import MLPClassifier
 
 from smac.configspace import ConfigurationSpace
-from smac.facade.smac_bohb_facade import BOHB4HPO
+from smac.facade.smac_mf_facade import SMAC4MF
 from smac.scenario.scenario import Scenario
 
 digits = load_digits()
@@ -126,9 +126,9 @@ def mlp_from_cfg(cfg, seed, instance, budget, **kwargs):
 # intensifier parameters
 intensifier_kwargs = {'initial_budget': 5, 'max_budget': max_iters, 'eta': 3}
 # To optimize, we pass the function to the SMAC-object
-smac = BOHB4HPO(scenario=scenario, rng=np.random.RandomState(42),
-                tae_runner=mlp_from_cfg,
-                intensifier_kwargs=intensifier_kwargs)  # all arguments related to intensifier can be passed like this
+smac = SMAC4MF(scenario=scenario, rng=np.random.RandomState(42),
+               tae_runner=mlp_from_cfg,
+               intensifier_kwargs=intensifier_kwargs)  # all arguments related to intensifier can be passed like this
 
 # Example call of the function with default values
 # It returns: Status, Cost, Runtime, Additional Infos
diff --git a/examples/BOHB4HPO_sgd_instances.py b/examples/SMAC4MF_sgd_instances.py
similarity index 83%
rename from examples/BOHB4HPO_sgd_instances.py
rename to examples/SMAC4MF_sgd_instances.py
index eed6da2d8..16416c611 100644
--- a/examples/BOHB4HPO_sgd_instances.py
+++ b/examples/SMAC4MF_sgd_instances.py
@@ -1,16 +1,17 @@
 """
-=========================================================
-Optimizing average cross-validation performance with BOHB
-=========================================================
+============================================================
+Optimizing average cross-validation performance with SMAC4MF
+============================================================
 An example for the usage of Hyperband intensifier in SMAC with multiple instances.
 We optimize a SGD classifier on the digits dataset as multiple binary classification problems using
 "Hyperband" intensification.
 We split the digits dataset (10 classes) into 45 binary datasets.
 
-In this example, we use instances as the budget in hyperband and optimize the average cross validation accuracy.
-An "Instance" represents a specific scenario/condition (eg: different datasets, subsets, transformations)
-for the algorithm to run. SMAC then returns the algorithm that had the best performance across all the instances.
-In this case, an instance is a binary dataset i.e., digit-2 vs digit-3.
+In this example, we use instances as the budget in hyperband and optimize the average cross
+validation accuracy. An "Instance" represents a specific scenario/condition (e.g., different datasets,
+subsets, transformations) for the algorithm to run. SMAC then returns the algorithm that had the
+best performance across all the instances. In this case, an instance is a binary dataset, i.e.,
+digit-2 vs digit-3.
 """
 
 import itertools
@@ -26,11 +27,11 @@
 
 # Import ConfigSpace and different types of parameters
 from smac.configspace import ConfigurationSpace
-from smac.facade.smac_bohb_facade import BOHB4HPO
+from smac.facade.smac_mf_facade import SMAC4MF
 # Import SMAC-utilities
 from smac.scenario.scenario import Scenario
 
-# We load the MNIST-dataset (a widely used benchmark) and split it into a collection of binary datasets
+# We load the MNIST-dataset (a widely used benchmark) and split it into a list of binary datasets
 digits = datasets.load_digits()
 instances = [[str(a) + str(b)] for a, b in itertools.combinations(digits.target_names, 2)]
 
@@ -73,9 +74,9 @@ def sgd_from_cfg(cfg, seed, instance):
     warnings.filterwarnings('ignore', category=ConvergenceWarning)
 
     # SGD classifier using given configuration
-    clf = SGDClassifier(loss='log', penalty='elasticnet', alpha=cfg['alpha'], l1_ratio=cfg['l1_ratio'],
-                        learning_rate=cfg['learning_rate'], eta0=cfg['eta0'],
-                        max_iter=30, early_stopping=True, random_state=seed)
+    clf = SGDClassifier(loss='log', penalty='elasticnet', alpha=cfg['alpha'],
+                        l1_ratio=cfg['l1_ratio'], learning_rate=cfg['learning_rate'],
+                        eta0=cfg['eta0'], max_iter=30, early_stopping=True, random_state=seed)
 
     # get instance
     data, target = generate_instances(int(instance[0]), int(instance[1]))
@@ -120,9 +121,9 @@ def sgd_from_cfg(cfg, seed, instance):
 }
 
 # To optimize, we pass the function to the SMAC-object
-smac = BOHB4HPO(scenario=scenario, rng=np.random.RandomState(42),
-                tae_runner=sgd_from_cfg,
-                intensifier_kwargs=intensifier_kwargs)  # all arguments related to intensifier can be passed like this
+smac = SMAC4MF(scenario=scenario, rng=np.random.RandomState(42),
+               tae_runner=sgd_from_cfg,
+               intensifier_kwargs=intensifier_kwargs)  # all arguments related to intensifier can be passed like this
 
 # Example call of the function
 # It returns: Status, Cost, Runtime, Additional Infos
diff --git a/smac/facade/smac_bohb_facade.py b/smac/facade/smac_mf_facade.py
similarity index 91%
rename from smac/facade/smac_bohb_facade.py
rename to smac/facade/smac_mf_facade.py
index 448ec03ae..7e27510a7 100644
--- a/smac/facade/smac_bohb_facade.py
+++ b/smac/facade/smac_mf_facade.py
@@ -10,9 +10,10 @@
 __license__ = "3-clause BSD"
 
 
-class BOHB4HPO(SMAC4HPO):
+class SMAC4MF(SMAC4HPO):
     """
-    Facade to use BOHB i.e., SMAC with a Hyperband intensifier for hyperparameter optimization
+    Facade to use SMAC with a Hyperband intensifier for hyperparameter optimization using multiple
+    fidelities
     see smac.facade.smac_Facade for API
     This facade overwrites options available via the SMAC facade
 
@@ -64,5 +65,5 @@ def __init__(self, **kwargs: typing.Any):
         # activate predict incumbent
         self.solver.epm_chooser.predict_x_best = True
 
-        # BOHB requires at least D+1 no. of samples to build a model
+        # SMAC4MF requires at least D+1 no. of samples to build a model
         self.solver.epm_chooser.min_samples_model = len(scenario.cs.get_hyperparameters()) + 1
diff --git a/smac/intensification/successive_halving.py b/smac/intensification/successive_halving.py
index e2f53a1fd..ad44bff31 100644
--- a/smac/intensification/successive_halving.py
+++ b/smac/intensification/successive_halving.py
@@ -31,7 +31,7 @@ class _SuccessiveHalving(AbstractRacer):
     The `SuccessiveHalving` class can create multiple `_SuccessiveHalving` objects, to allow parallelism in the
     method (up to the number of workers available).
     The user interface is expected to be `SuccessiveHalving`, yet this class (`_SuccessiveHalving`) contains the
-    actual single worker implementation of the BOHB method.
+    actual single worker implementation of the SMAC4MF method.
 
     Successive Halving intensifier (and Hyperband) can operate on two kinds of budgets:
 
@@ -377,6 +377,8 @@ def process_results(self,
         if result.status == StatusType.CAPPED and run_info.config == self.running_challenger:
             self.curr_inst_idx[run_info.config] = np.inf
         else:
+            self._ta_time = self._ta_time  # type: float # make mypy happy
+            self.num_run = self.num_run  # type: int # make mypy happy
             self._ta_time += result.time
             self.num_run += 1
 
@@ -583,6 +585,7 @@ def get_next_run(self,
             # We see a challenger for the first time, so no
             # instance has been launched
             self.curr_inst_idx[challenger] = 0
+            self._chall_indx = self._chall_indx  # type: int # make mypy happy
             self._chall_indx += 1
             self.running_challenger = challenger
 
@@ -1058,8 +1061,8 @@ class SuccessiveHalving(ParallelScheduler):
 
     Examples for successive halving (and hyperband) can be found here:
     * Runtime objective and multiple instances *(instances as budget)*: `examples/spear_qcp/SMAC4AC_SH_spear_qcp.py`
-    * Quality objective and multiple instances *(instances as budget)*: `examples/BOHB4HPO_sgd_instances.py`
-    * Quality objective and single instance *(real-valued budget)*: `examples/BOHB4HPO_mlp.py`
+    * Quality objective and multiple instances *(instances as budget)*: `examples/SMAC4MF_sgd_instances.py`
+    * Quality objective and single instance *(real-valued budget)*: `examples/SMAC4MF_mlp.py`
 
     This class instantiates `_SuccessiveHalving` objects on a need basis, that is, to prevent workers
     from being idle. The actual logic that implements the Successive halving method
diff --git a/test/test_facade/test_bohb4hpo_facade.py b/test/test_facade/test_smac4mf_facade.py
similarity index 89%
rename from test/test_facade/test_bohb4hpo_facade.py
rename to test/test_facade/test_smac4mf_facade.py
index 14da6e02b..2ed94617d 100644
--- a/test/test_facade/test_bohb4hpo_facade.py
+++ b/test/test_facade/test_smac4mf_facade.py
@@ -6,12 +6,12 @@
 
 
 from smac.configspace import ConfigurationSpace
-from smac.facade.smac_bohb_facade import BOHB4HPO
+from smac.facade.smac_mf_facade import SMAC4MF
 from smac.initial_design.random_configuration_design import RandomConfigurations
 from smac.scenario.scenario import Scenario
 
 
-class TestBOHBFacade(unittest.TestCase):
+class TestSMAC4MFFacade(unittest.TestCase):
 
     def setUp(self):
         self.cs = ConfigurationSpace()
@@ -34,7 +34,7 @@ def test_initializations(self):
             cs.add_hyperparameter(UniformFloatHyperparameter('x%d' % (i + 1), 0, 1))
         scenario = Scenario({'cs': cs, 'run_obj': 'quality'})
         hb_kwargs = {'initial_budget': 1, 'max_budget': 3}
-        facade = BOHB4HPO(scenario=scenario, intensifier_kwargs=hb_kwargs)
+        facade = SMAC4MF(scenario=scenario, intensifier_kwargs=hb_kwargs)
         self.assertIsInstance(facade.solver.initial_design, RandomConfigurations)
 
         # ensure number of samples required is D+1
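For readers updating their own scripts to the renamed facade, the following is a minimal, self-contained sketch of how `SMAC4MF` can be driven after this patch. Only the import path `smac.facade.smac_mf_facade.SMAC4MF`, the `tae_runner`/`rng`/`intensifier_kwargs` arguments, and the `initial_budget`/`max_budget`/`eta` keys come from the files changed above; the toy objective `toy_from_cfg`, the single hyperparameter `x`, and the concrete scenario values are illustrative assumptions, not part of the patch.

```python
import numpy as np
from ConfigSpace.hyperparameters import UniformFloatHyperparameter

from smac.configspace import ConfigurationSpace
# Renamed import path introduced by this patch
# (previously: from smac.facade.smac_bohb_facade import BOHB4HPO)
from smac.facade.smac_mf_facade import SMAC4MF
from smac.scenario.scenario import Scenario


def toy_from_cfg(cfg, seed, instance, budget, **kwargs):
    # Illustrative target function: cost shrinks as x approaches 0.5
    # and as the fidelity (budget) grows.
    return (cfg['x'] - 0.5) ** 2 + 1.0 / budget


cs = ConfigurationSpace()
cs.add_hyperparameter(UniformFloatHyperparameter('x', 0, 1))

scenario = Scenario({'cs': cs,
                     'run_obj': 'quality',   # optimize solution quality (cost), not runtime
                     'runcount-limit': 20,   # illustrative cap on target function evaluations
                     'deterministic': 'true'})

# Same intensifier arguments as in the renamed examples above
intensifier_kwargs = {'initial_budget': 2, 'max_budget': 10, 'eta': 3}

smac = SMAC4MF(scenario=scenario, rng=np.random.RandomState(42),
               tae_runner=toy_from_cfg,
               intensifier_kwargs=intensifier_kwargs)

incumbent = smac.optimize()
print(incumbent)
```

As in the renamed examples, the `budget` value handed to the target function is chosen by the Hyperband intensifier, ranging from `initial_budget` up to `max_budget`.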