diff --git a/pymoo/algorithms/soo/nonconvex/de_bak.py b/pymoo/algorithms/soo/nonconvex/de_bak.py deleted file mode 100755 index 406b63767..000000000 --- a/pymoo/algorithms/soo/nonconvex/de_bak.py +++ /dev/null @@ -1,291 +0,0 @@ -""" - -Differential Evolution (DE) - --------------------------------- Description ------------------------------- - - - --------------------------------- References -------------------------------- - -[1] J. Blank and K. Deb, pymoo: Multi-Objective Optimization in Python, in IEEE Access, -vol. 8, pp. 89497-89509, 2020, DOI: 10.1109/ACCESS.2020.2990567 - --------------------------------- License ----------------------------------- - - ----------------------------------------------------------------------------- -""" - -import numpy as np - -from pymoo.algorithms.base.genetic import GeneticAlgorithm -from pymoo.algorithms.soo.nonconvex.ga import FitnessSurvival -from pymoo.core.duplicate import NoDuplicateElimination -from pymoo.core.infill import InfillCriterion -from pymoo.core.mixed import MixedVariableMating, MixedVariableSampling -from pymoo.core.parameters import get_params, flatten -from pymoo.core.population import Population -from pymoo.core.problem import Problem -from pymoo.core.replacement import ImprovementReplacement -from pymoo.core.variable import Choice, get, Binary -from pymoo.core.variable import Real -from pymoo.docs import parse_doc_string -from pymoo.operators.crossover.binx import mut_binomial -from pymoo.operators.crossover.expx import mut_exp -from pymoo.operators.mutation.pm import PM -from pymoo.operators.param_control import EvolutionaryParameterControl, AgeBasedTournamentSelection, NoParameterControl -from pymoo.operators.repair.bounds_repair import repair_random_init -from pymoo.operators.sampling.rnd import FloatRandomSampling -from pymoo.operators.selection.rnd import fast_fill_random -from pymoo.util.display import SingleObjectiveDisplay -from pymoo.util.misc import where_is_what -from pymoo.util.termination.default import SingleObjectiveDefaultTermination - - -# ========================================================================================================= -# Crossover -# ========================================================================================================= - -def de_differential(X, F, jitter, alpha=0.001): - n_parents, n_matings, n_var = X.shape - assert n_parents % 2 == 1, "For the differential an odd number of values need to be provided" - - # the differentials from each pair - delta = np.zeros((n_matings, n_var)) - - # for each difference of the differences - for i in range(1, n_parents, 2): - # create the weight vectors with jitter to give some variation - _F = F[:, None].repeat(n_var, axis=1) - _F[jitter] *= (1 + alpha * (np.random.random((jitter.sum(), n_var)) - 0.5)) - - # add the difference to the vector - delta += _F * (X[i] - X[i + 1]) - - # now add the differentials to the first parent - Xp = X[0] + delta - - return Xp - - -# ========================================================================================================= -# Different Variants of Differential Evolution -# ========================================================================================================= - - -class Variant(InfillCriterion): - - def __init__(self, - selection="best", - n_diffs=1, - F=0.5, - crossover="bin", - CR=0.75, - jitter=False, - prob_mut=0.1, - **kwargs): - - super().__init__(**kwargs) - self.selection = Choice(selection, options=["best"], all=["rand", "best", "target-to-best"]) - 
self.n_diffs = Choice(n_diffs, options=[1], all=[1, 2]) - self.F = Real(F, bounds=(0.4, 0.6), strict=(0.0, None)) - self.crossover = Choice(crossover, ["bin"], all=["bin", "exp", "hypercube", "line"]) - self.CR = Real(CR, bounds=(0.3, 0.7), strict=(0.0, 1.0)) - self.jitter = Choice(jitter, options=[False], all=[True, False]) - - self.mutation = PM(at_least_once=True) - self.mutation.eta = 20 - # self.mutation.prob = prob_mut - self.mutation.prob = Real(prob_mut, bounds=(0.05, 0.35)) - # self.mutation.prob_var = Real(None, bounds=(0.0, 0.5)) - - def do(self, problem, pop, n_offsprings, algorithm=None, **kwargs): - - # find the different groups of selection schemes and order them by category - sel, n_diffs = get(self.selection, self.n_diffs, size=n_offsprings) - H = where_is_what(zip(sel, n_diffs)) - - # get the parameters used for reproduction during the crossover - F, CR, jitter = get(self.F, self.CR, self.jitter, size=n_offsprings) - - # the `target` vectors which will be recombined - X = pop.get("X") - - # the `donor` vector which will be obtained through the differential equation - donor = np.empty_like(X) - - # for each type defined by the type and number of differentials - for (sel_type, n_diffs), targets in H.items(): - - # the number of offsprings created in this run - n_matings, n_parents = len(targets), 1 + 2 * n_diffs - - # create the parents array - P = np.full([n_matings, n_parents], -1) - - itself = np.array(targets)[:, None] - - best = lambda: np.random.choice(np.where(pop.get("rank") == 0)[0], replace=True, size=n_matings) - - if sel_type == "rand": - fast_fill_random(P, len(pop), columns=range(n_parents), Xp=itself) - elif sel_type == "best": - P[:, 0] = best() - fast_fill_random(P, len(pop), columns=range(1, n_parents), Xp=itself) - elif sel_type == "target-to-best": - P[:, 0] = targets - P[:, 1] = best() - fast_fill_random(P, len(pop), columns=range(2, n_parents), Xp=itself) - else: - raise Exception("Unknown selection method.") - - # get the values of the parents in the design space - XX = np.swapaxes(X[P], 0, 1) - - # do the differential crossover to create the donor vector - Xp = de_differential(XX, F[targets], jitter[targets]) - - # make sure everything stays in bounds - if problem.has_bounds(): - Xp = repair_random_init(Xp, XX[0], *problem.bounds()) - - # set the donors (the one we have created in this step) - donor[targets] = Xp - - # the `trial` created by by recombining target and donor - trial = np.empty_like(X) - - crossover = get(self.crossover, size=n_offsprings) - for name, K in where_is_what(crossover).items(): - - _target = X[K] - _donor = donor[K] - _CR = CR[K] - - if name == "bin": - M = mut_binomial(len(K), problem.n_var, _CR, at_least_once=True) - _trial = np.copy(_target) - _trial[M] = _donor[M] - elif name == "exp": - M = mut_exp(n_offsprings, problem.n_var, _CR, at_least_once=True) - _trial = np.copy(_target) - _trial[M] = _donor[M] - elif name == "line": - w = np.random.random((len(K), 1)) * _CR[:, None] - _trial = _target + w * (_donor - _target) - elif name == "hypercube": - w = np.random.random((len(K), _target.shape[1])) * _CR[:, None] - _trial = _target + w * (_donor - _target) - else: - raise Exception(f"Unknown crossover variant: {name}") - - trial[K] = _trial - - # create the population - off = Population.new(X=trial) - - # do the mutation which helps to add some more diversity - off = self.mutation.do(problem, off) - - # repair the individuals if necessary - disabled if repair is NoRepair - off = self.repair.do(problem, off, **kwargs) - - 
return off - - -# ========================================================================================================= -# Implementation -# ========================================================================================================= - - -class DE(GeneticAlgorithm): - - def __init__(self, - pop_size=100, - n_offsprings=None, - sampling=FloatRandomSampling(), - variant=None, - display=SingleObjectiveDisplay(), - control=None, - **kwargs - ): - - if variant is None: - if "control" not in kwargs: - kwargs["control"] = EvolutionaryParameterControl - variant = Variant(**kwargs) - - elif isinstance(variant, str): - try: - _, selection, n_diffs, crossover = variant.split("/") - variant = Variant(selection=selection, n_diffs=n_diffs, crossover=crossover, **kwargs) - except: - raise Exception("Please provide a valid variant: DE///") - - super().__init__(pop_size=pop_size, - n_offsprings=n_offsprings, - sampling=sampling, - mating=variant, - survival=None, - display=display, - eliminate_duplicates=False, - **kwargs) - - self.default_termination = SingleObjectiveDefaultTermination() - - self.control = control - self.prev_params = None - self.params = None - - def _initialize_advance(self, infills=None, **kwargs): - FitnessSurvival().do(self.problem, self.pop, return_indices=True) - - def _infill(self): - - if self.control is not NoParameterControl: - - omega = flatten(get_params(self.mating)) - - problem = Problem(vars=omega) - - if self.prev_params is None: - self.params = MixedVariableSampling().do(problem, self.pop_size) - self.prev_params = Population.create(*self.params) - else: - selection = AgeBasedTournamentSelection() - mating = MixedVariableMating(selection=selection, eliminate_duplicates=NoDuplicateElimination()) - self.params = mating.do(problem, self.prev_params, self.n_offsprings) - - self.params.set("n_gen", self.n_gen) - - P = self.params.get("X") - for name, param in omega.items(): - param.set(np.array([d[name] for d in P])) - - infills = self.mating.do(self.problem, self.pop, self.n_offsprings, algorithm=self) - - return infills - - def _advance(self, infills=None, **kwargs): - assert infills is not None, "This algorithms uses the AskAndTell interface thus infills must to be provided." 
- prev_pop = Population.create(*self.pop) - - # replace the individuals with the corresponding parents from the mating - ImprovementReplacement().do(self.problem, self.pop, infills, inplace=True) - - # update the information regarding the current population - FitnessSurvival().do(self.problem, self.pop) - - if self.control is not NoParameterControl: - - has_not_improved = self.pop == prev_pop - self.params[has_not_improved] = self.prev_params[has_not_improved] - self.prev_params = self.params - - def _set_optimum(self, **kwargs): - k = self.pop.get("rank") == 0 - self.opt = self.pop[k] - - -parse_doc_string(DE.__init__) diff --git a/pymoo/algorithms/soo/nonconvex/nelder_mead.py b/pymoo/algorithms/soo/nonconvex/nelder.py similarity index 100% rename from pymoo/algorithms/soo/nonconvex/nelder_mead.py rename to pymoo/algorithms/soo/nonconvex/nelder.py diff --git a/pymoo/algorithms/soo/nonconvex/pattern_search.py b/pymoo/algorithms/soo/nonconvex/pattern.py similarity index 100% rename from pymoo/algorithms/soo/nonconvex/pattern_search.py rename to pymoo/algorithms/soo/nonconvex/pattern.py diff --git a/pymoo/algorithms/param_tuning.py b/pymoo/algorithms/tuning.py similarity index 100% rename from pymoo/algorithms/param_tuning.py rename to pymoo/algorithms/tuning.py diff --git a/pymoo/util/termination/__init__.py b/pymoo/termination/__init__.py similarity index 100% rename from pymoo/util/termination/__init__.py rename to pymoo/termination/__init__.py diff --git a/pymoo/util/termination/collection.py b/pymoo/termination/collection.py similarity index 52% rename from pymoo/util/termination/collection.py rename to pymoo/termination/collection.py index 4019036cc..565045791 100644 --- a/pymoo/util/termination/collection.py +++ b/pymoo/termination/collection.py @@ -8,8 +8,5 @@ def __init__(self, *args) -> None: super().__init__() self.terminations = args - def _do_continue(self, algorithm): - for term in self.terminations: - if not term.do_continue(algorithm): - return False - return True + def _update(self, algorithm): + return min([termination.update(algorithm) for termination in self.terminations]) diff --git a/pymoo/termination/cv.py b/pymoo/termination/cv.py new file mode 100644 index 000000000..9b9470e4a --- /dev/null +++ b/pymoo/termination/cv.py @@ -0,0 +1,38 @@ +from pymoo.core.termination import Termination +from pymoo.util.termination.delta import DeltaToleranceTermination + + +class ConstraintViolationTermination(DeltaToleranceTermination): + + def __init__(self, tol=1e-6, **kwargs): + super().__init__(tol, **kwargs) + + def _update(self, algorithm): + if algorithm.problem.has_constraints(): + return super()._update(algorithm) + else: + return 1.0 + + def _delta(self, prev, current): + return max(0, prev - current) + + def _data(self, algorithm): + return algorithm.opt.get("CV").min() + + +class UntilFeasibleTermination(Termination): + + def __init__(self) -> None: + super().__init__() + self.initial_cv = None + + def _update(self, algorithm): + cv = algorithm.opt.get("CV").min() + + if self.initial_cv is None: + if cv <= 0: + self.initial_cv = 1e-32 + else: + self.initial_cv = cv + + return 1 - cv / self.initial_cv diff --git a/pymoo/termination/default.py b/pymoo/termination/default.py new file mode 100644 index 000000000..2cea77c99 --- /dev/null +++ b/pymoo/termination/default.py @@ -0,0 +1,40 @@ +from pymoo.core.termination import Termination +from pymoo.util.termination.cv import ConstraintViolationTermination +from pymoo.util.termination.ftol import SingleObjectiveSpaceTermination, 
MultiObjectiveSpaceTermination +from pymoo.util.termination.robust import RobustTermination +from pymoo.util.termination.xtol import DesignSpaceTermination + + +class DefaultTermination(Termination): + + def __init__(self, x, cv, f) -> None: + super().__init__() + self.x = x + self.cv = cv + self.f = f + + def _update(self, algorithm): + cv = self.cv.update(algorithm) + x = self.x.update(algorithm) + f = self.f.update(algorithm) + return min(cv, max(x, f)) + + +class DefaultSingleObjectiveTermination(DefaultTermination): + + def __init__(self) -> None: + x = RobustTermination(DesignSpaceTermination(1e-8), 30) + cv = RobustTermination(ConstraintViolationTermination(1e-8), 50) + f = RobustTermination(SingleObjectiveSpaceTermination(1e-6), 30) + super().__init__(x, cv, f) + + +class DefaultMultiObjectiveTermination(DefaultTermination): + + def __init__(self, n_skip=5) -> None: + x = RobustTermination(DesignSpaceTermination(1e-8, n_skip=n_skip), 30) + cv = RobustTermination(ConstraintViolationTermination(1e-8, n_skip=n_skip), 50) + f = RobustTermination(MultiObjectiveSpaceTermination(0.0025, n_skip=n_skip), 50) + super().__init__(x, cv, f) + + diff --git a/pymoo/termination/delta.py b/pymoo/termination/delta.py new file mode 100644 index 000000000..e27a11f76 --- /dev/null +++ b/pymoo/termination/delta.py @@ -0,0 +1,65 @@ +import math +from abc import abstractmethod + +from pymoo.core.termination import Termination + + +class DeltaToleranceTermination(Termination): + + def __init__(self, tol, n_skip=0, log=True): + super().__init__() + + # the tolerance threshold the difference (delta) to be under + assert tol >= 0 + self.tol = tol + + self.log = log + + # the previous values to calculate the difference + self.data = None + + # a counter of update calls + self.counter = 0 + + # whether some updates should be skipped + self.n_skip = n_skip + + def _update(self, algorithm): + + # the object from the previous iteration + prev = self.data + + # and the one from the current iteration + current = self._data(algorithm) + + # if there is no previous element to use + if prev is None: + perc = 0.0 + elif self.counter > 0 and self.counter % (self.n_skip + 1) != 0: + perc = self.perc + else: + tol = self.tol + delta = self._delta(prev, current) + + if delta <= tol: + return 1.0 + else: + if self.log: + tol, delta = math.log(tol), math.log(delta) + perc = 1 / (1 + (delta - tol)) + + # remember the data from the current iteration and set it to data + self.data = current + + # increase the function call counter + self.counter += 1 + + return perc + + @abstractmethod + def _delta(self, prev, current): + pass + + @abstractmethod + def _data(self, algorithm): + pass diff --git a/pymoo/termination/ftol.py b/pymoo/termination/ftol.py new file mode 100644 index 000000000..f43ce6261 --- /dev/null +++ b/pymoo/termination/ftol.py @@ -0,0 +1,115 @@ +import numpy as np + +from pymoo.indicators.igd import IGD +from pymoo.util.normalization import normalize +from pymoo.util.termination.delta import DeltaToleranceTermination + + +def calc_delta(a, b): + return np.max(np.abs((a - b))) + + +def calc_delta_norm(a, b, norm): + return np.max(np.abs((a - b) / norm)) + + +class SingleObjectiveSpaceTermination(DeltaToleranceTermination): + + def __init__(self, tol=1e-6, **kwargs) -> None: + super().__init__(tol, **kwargs) + + def _delta(self, prev, current): + return max(0, prev - current) + + def _data(self, algorithm): + return algorithm.opt.get("F").min() + + +class MultiObjectiveSpaceTermination(DeltaToleranceTermination): + + 
def __init__(self, tol=0.0025, **kwargs): + super().__init__(tol, **kwargs) + + def _data(self, algorithm): + F = algorithm.opt.get("F") + return dict(ideal=F.min(axis=0), nadir=F.max(axis=0), F=F) + + def _delta(self, prev, current): + + # this is the range between the nadir and the ideal point + norm = current["nadir"] - current["ideal"] + + # if the range is degenerated (very close to zero) - disable normalization by dividing by one + norm[norm < 1e-32] = 1.0 + + # calculate the change from last to current in ideal and nadir point + delta_ideal = calc_delta_norm(current["ideal"], prev["ideal"], norm) + delta_nadir = calc_delta_norm(current["nadir"], prev["nadir"], norm) + + # get necessary data from the current population + c_F, c_ideal, c_nadir = current["F"], current["ideal"], current["nadir"] + + # normalize last and current with respect to most recent ideal and nadir + c_N = normalize(c_F, c_ideal, c_nadir) + l_N = normalize(prev["F"], c_ideal, c_nadir) + + # calculate IGD from one to another + delta_f = IGD(c_N).do(l_N) + + return max(delta_ideal, delta_nadir, delta_f) + + + + + +# class MultiObjectiveSpaceToleranceTerminationWithRenormalization(MultiObjectiveSpaceTermination): +# +# def __init__(self, +# n_last=30, +# all_to_current=False, +# sliding_window=True, +# perf_indicator="igd", +# **kwargs) -> None: +# +# super().__init__(n_last=n_last, +# truncate_metrics=False, +# truncate_data=False, +# **kwargs) +# self.data = [] +# self.all_to_current = all_to_current +# self.sliding_window = sliding_window +# self.perf_indicator = perf_indicator +# +# def _metric(self, data): +# ret = super()._metric(data) +# +# if not self.sliding_window: +# data = self.data[-self.metric_window_size:] +# +# # get necessary data from the current population +# current = data[-1] +# c_F, c_ideal, c_nadir = current["F"], current["ideal"], current["nadir"] +# +# # normalize all previous generations with respect to current ideal and nadir +# N = [normalize(e["F"], c_ideal, c_nadir) for e in data] +# +# # check if the movement of all points is significant +# if self.all_to_current: +# c_N = normalize(c_F, c_ideal, c_nadir) +# if self.perf_indicator == "igd": +# delta_f = [IGD(c_N).do(N[k]) for k in range(len(N))] +# elif self.perf_indicator == "hv": +# hv = Hypervolume(ref_point=np.ones(c_F.shape[1])) +# delta_f = [hv.do(N[k]) for k in range(len(N))] +# else: +# delta_f = [IGD(N[k + 1]).do(N[k]) for k in range(len(N) - 1)] +# +# ret["delta_f"] = delta_f +# +# return ret +# +# def _decide(self, metrics): +# delta_ideal = [e["delta_ideal"] for e in metrics] +# delta_nadir = [e["delta_nadir"] for e in metrics] +# delta_f = [max(e["delta_f"]) for e in metrics] +# return max(max(delta_ideal), max(delta_nadir), max(delta_f)) > self.tol diff --git a/pymoo/termination/indicator.py b/pymoo/termination/indicator.py new file mode 100644 index 000000000..198f1f599 --- /dev/null +++ b/pymoo/termination/indicator.py @@ -0,0 +1,55 @@ +from math import log + +from pymoo.core.termination import Termination + + +class IndicatorTermination(Termination): + + def __init__(self, indicator, threshold, goal, log=True, **kwargs) -> None: + super().__init__() + + # the indicator to be used + self.indicator = indicator + + # define the threshold for termination + self.threshold = threshold + + # what is the optimization goal for this indicator + self.goal = goal + assert goal in ["minimize", "maximize"] + + # optional parameters when the indicator calculation is performed + self.kwargs = kwargs + + # initial the minimum and 
maximum values of the indicator + self._min = float("inf") + self._max = -float("inf") + + # whether a log scale for convergence should be used + self.log = log + + def _update(self, algorithm): + + # get the objective space values + F = algorithm.opt.get("F") + + # get the resulting value from the indicator + v = self.indicator.do(F, **self.kwargs) + + threshold = self.threshold + + # update the minimum and maximum boundary ranges + self._min = min(self._min, v) + self._max = max(self._max, v) + _min, _max = self._min, self._max + + if self.log: + threshold, _min, _max, v = log(threshold), log(_min), log(_max), log(v) + + # depending on the goal either set the percentage + if self.goal == "minimize": + perc = 1 - (v - threshold) / (_max - threshold) + else: + perc = (v - _min) / (threshold - _min) + + return perc diff --git a/pymoo/termination/max_eval.py b/pymoo/termination/max_eval.py new file mode 100644 index 000000000..eb863c78d --- /dev/null +++ b/pymoo/termination/max_eval.py @@ -0,0 +1,11 @@ +from pymoo.core.termination import Termination + + +class MaximumFunctionCallTermination(Termination): + + def __init__(self, n_max_evals=float("inf")) -> None: + super().__init__() + self.n_max_evals = n_max_evals + + def _update(self, algorithm): + return algorithm.evaluator.n_eval / self.n_max_evals diff --git a/pymoo/termination/max_gen.py b/pymoo/termination/max_gen.py new file mode 100644 index 000000000..9425ef529 --- /dev/null +++ b/pymoo/termination/max_gen.py @@ -0,0 +1,12 @@ +from pymoo.core.termination import Termination + + +class MaximumGenerationTermination(Termination): + + def __init__(self, n_max_gen=float("inf")) -> None: + super().__init__() + self.n_max_gen = n_max_gen + + def _update(self, algorithm): + return algorithm.n_gen / self.n_max_gen + diff --git a/pymoo/util/termination/max_time.py b/pymoo/termination/max_time.py similarity index 70% rename from pymoo/util/termination/max_time.py rename to pymoo/termination/max_time.py index 297b51c34..3dc3ca553 100644 --- a/pymoo/util/termination/max_time.py +++ b/pymoo/termination/max_time.py @@ -9,7 +9,6 @@ class TimeBasedTermination(Termination): def __init__(self, max_time) -> None: super().__init__() self.start = None - self.now = None if isinstance(max_time, str): self.max_time = time_to_int(max_time) @@ -18,10 +17,9 @@ def __init__(self, max_time) -> None: else: raise Exception("Either provide the time as a string or an integer.") - def do_continue(self, algorithm): - if self.start is None: - self.start = algorithm.start_time - - self.now = time.time() - return self.now - self.start < self.max_time + def setup(self, _): + self.start = time.time() + def _update(self, algorithm): + elapsed = time.time() - self.start + return elapsed / self.max_time diff --git a/pymoo/termination/robust.py b/pymoo/termination/robust.py new file mode 100644 index 000000000..604c5a47c --- /dev/null +++ b/pymoo/termination/robust.py @@ -0,0 +1,35 @@ +from pymoo.core.termination import Termination +from pymoo.util.sliding_window import SlidingWindow + + +class RobustTermination(Termination): + + def __init__(self, + termination, + n=30, + **kwargs + ) -> None: + """ + + Parameters + ---------- + + termination : Termination + The termination criterion that shall become robust + + n : int + The number of last generations to be considered for termination. 
+ + """ + super().__init__(**kwargs) + + # create a collection in case number of max generation or evaluations is used + self.termination = termination + + # the history calculated also in a sliding window + self.history = SlidingWindow(n) + + def _update(self, algorithm): + perc = self.termination.update(algorithm) + self.history.append(perc) + return min(self.history) diff --git a/pymoo/termination/xtol.py b/pymoo/termination/xtol.py new file mode 100644 index 000000000..ec6929114 --- /dev/null +++ b/pymoo/termination/xtol.py @@ -0,0 +1,23 @@ +from pymoo.indicators.igd import IGD +from pymoo.util.normalization import normalize +from pymoo.util.termination.delta import DeltaToleranceTermination + + +class DesignSpaceTermination(DeltaToleranceTermination): + + def __init__(self, tol=0.001, **kwargs): + super().__init__(tol, **kwargs) + + def _delta(self, prev, current): + return IGD(current).do(prev) + + def _data(self, algorithm): + + X = algorithm.opt.get("X") + + # do normalization if bounds are given + problem = algorithm.problem + if X.dtype != object and problem.has_bounds(): + X = normalize(X, xl=problem.xl, xu=problem.xu) + + return X diff --git a/pymoo/util/progress.py b/pymoo/util/progress.py new file mode 100644 index 000000000..e69de29bb diff --git a/pymoo/util/termination/constr_violation.py b/pymoo/util/termination/constr_violation.py deleted file mode 100644 index 6da2cb278..000000000 --- a/pymoo/util/termination/constr_violation.py +++ /dev/null @@ -1,53 +0,0 @@ -from pymoo.core.termination import Termination -from pymoo.util.misc import to_numpy -from pymoo.util.termination.sliding_window_termination import SlidingWindowTermination - - -class ConstraintViolationToleranceTermination(SlidingWindowTermination): - - def __init__(self, - n_last=20, - tol=1e-6, - nth_gen=1, - n_max_gen=None, - n_max_evals=None, - **kwargs): - - super().__init__(metric_window_size=n_last, - data_window_size=2, - min_data_for_metric=2, - nth_gen=nth_gen, - n_max_gen=n_max_gen, - n_max_evals=n_max_evals, - **kwargs) - self.tol = tol - - def _store(self, algorithm): - return algorithm.opt.get("CV").max() - - def _metric(self, data): - last, current = data[-2], data[-1] - return {"cv": current, - "delta_cv": abs(last - current) - } - - def _decide(self, metrics): - cv = to_numpy([e["cv"] for e in metrics]) - delta_cv = to_numpy([e["delta_cv"] for e in metrics]) - n_feasible = (cv <= 0).sum() - - # if the whole window had only feasible solutions - if n_feasible == len(metrics): - return False - # transition period - some were feasible some were not - elif 0 < n_feasible < len(metrics): - return True - # all solutions are infeasible - else: - return delta_cv.max() > self.tol - - -class FeasibleSolutionFoundTermination(Termination): - - def _do_continue(self, algorithm): - return algorithm.opt.get("CV").min() != 0 diff --git a/pymoo/util/termination/default.py b/pymoo/util/termination/default.py deleted file mode 100644 index 4804daf72..000000000 --- a/pymoo/util/termination/default.py +++ /dev/null @@ -1,76 +0,0 @@ -from pymoo.util.termination.constr_violation import ConstraintViolationToleranceTermination -from pymoo.util.termination.f_tol import MultiObjectiveSpaceToleranceTermination -from pymoo.util.termination.f_tol_single import SingleObjectiveSpaceToleranceTermination -from pymoo.util.termination.sliding_window_termination import SlidingWindowTermination -from pymoo.util.termination.x_tol import DesignSpaceToleranceTermination - - -class DefaultTermination(SlidingWindowTermination): - - def 
__init__(self, - x_tol, - cv_tol, - f_tol, - n_max_gen=1000, - n_max_evals=100000, - **kwargs): - super().__init__(metric_window_size=1, - data_window_size=1, - min_data_for_metric=1, - n_max_gen=n_max_gen, - n_max_evals=n_max_evals, - **kwargs) - - self.x_tol = x_tol - self.cv_tol = cv_tol - self.f_tol = f_tol - - def _store(self, algorithm): - return algorithm - - def _metric(self, data): - algorithm = data[-1] - return { - "x_tol": self.x_tol.do_continue(algorithm), - "cv_tol": self.cv_tol.do_continue(algorithm), - "f_tol": self.f_tol.do_continue(algorithm) - } - - def _decide(self, metrics): - decisions = metrics[-1] - return decisions["x_tol"] and (decisions["cv_tol"] or decisions["f_tol"]) - - -class SingleObjectiveDefaultTermination(DefaultTermination): - - def __init__(self, - x_tol=1e-8, - cv_tol=1e-6, - f_tol=1e-6, - nth_gen=5, - n_last=20, - **kwargs) -> None: - super().__init__(DesignSpaceToleranceTermination(tol=x_tol, n_last=n_last), - ConstraintViolationToleranceTermination(tol=cv_tol, n_last=n_last), - SingleObjectiveSpaceToleranceTermination(tol=f_tol, n_last=n_last, nth_gen=nth_gen), - **kwargs) - - -class MultiObjectiveDefaultTermination(DefaultTermination): - def __init__(self, - x_tol=1e-8, - cv_tol=1e-6, - f_tol=0.0025, - nth_gen=5, - n_last=30, - **kwargs) -> None: - """ - - Returns - ------- - object - """ - super().__init__(DesignSpaceToleranceTermination(tol=x_tol, n_last=n_last), - ConstraintViolationToleranceTermination(tol=cv_tol, n_last=n_last), - MultiObjectiveSpaceToleranceTermination(tol=f_tol, n_last=n_last, nth_gen=nth_gen), - **kwargs) diff --git a/pymoo/util/termination/f_tol.py b/pymoo/util/termination/f_tol.py deleted file mode 100644 index ec16546f1..000000000 --- a/pymoo/util/termination/f_tol.py +++ /dev/null @@ -1,133 +0,0 @@ -import numpy as np - -from pymoo.indicators.hv import Hypervolume -from pymoo.indicators.igd import IGD -from pymoo.util.normalization import normalize -from pymoo.util.termination.sliding_window_termination import SlidingWindowTermination - - -def calc_delta(a, b): - return np.max(np.abs((a - b))) - - -def calc_delta_norm_old(a, b): - return np.max(np.abs((a - b)) / np.abs((a + b) / 2)) - - -def calc_delta_norm(a, b, norm): - return np.max(np.abs((a - b) / norm)) - - -class MultiObjectiveSpaceToleranceTermination(SlidingWindowTermination): - - def __init__(self, - tol=0.0025, - n_last=30, - nth_gen=5, - n_max_gen=None, - n_max_evals=None, - **kwargs) -> None: - super().__init__(metric_window_size=n_last, - data_window_size=2, - min_data_for_metric=2, - nth_gen=nth_gen, - n_max_gen=n_max_gen, - n_max_evals=n_max_evals, - **kwargs) - self.tol = tol - - def _store(self, algorithm): - F = algorithm.opt.get("F") - return { - "ideal": F.min(axis=0), - "nadir": F.max(axis=0), - "F": F - } - - def _metric(self, data): - last, current = data[-2], data[-1] - - # this is the range between the nadir and the ideal point - norm = current["nadir"] - current["ideal"] - - # if the range is degenerated (very close to zero) - disable normalization by dividing by one - norm[norm < 1e-32] = 1 - - # calculate the change from last to current in ideal and nadir point - delta_ideal = calc_delta_norm(current["ideal"], last["ideal"], norm) - delta_nadir = calc_delta_norm(current["nadir"], last["nadir"], norm) - - # get necessary data from the current population - c_F, c_ideal, c_nadir = current["F"], current["ideal"], current["nadir"] - - # normalize last and current with respect to most recent ideal and nadir - c_N = normalize(c_F, c_ideal, c_nadir) 
- l_N = normalize(last["F"], c_ideal, c_nadir) - - # calculate IGD from one to another - delta_f = IGD(c_N).do(l_N) - - return { - "delta_ideal": delta_ideal, - "delta_nadir": delta_nadir, - "delta_f": delta_f - } - - def _decide(self, metrics): - delta_ideal = [e["delta_ideal"] for e in metrics] - delta_nadir = [e["delta_nadir"] for e in metrics] - delta_f = [e["delta_f"] for e in metrics] - return max(max(delta_ideal), max(delta_nadir), max(delta_f)) > self.tol - - -class MultiObjectiveSpaceToleranceTerminationWithRenormalization(MultiObjectiveSpaceToleranceTermination): - - def __init__(self, - n_last=30, - all_to_current=False, - sliding_window=True, - perf_indicator="igd", - **kwargs) -> None: - - super().__init__(n_last=n_last, - truncate_metrics=False, - truncate_data=False, - **kwargs) - self.data = [] - self.all_to_current = all_to_current - self.sliding_window = sliding_window - self.perf_indicator = perf_indicator - - def _metric(self, data): - ret = super()._metric(data) - - if not self.sliding_window: - data = self.data[-self.metric_window_size:] - - # get necessary data from the current population - current = data[-1] - c_F, c_ideal, c_nadir = current["F"], current["ideal"], current["nadir"] - - # normalize all previous generations with respect to current ideal and nadir - N = [normalize(e["F"], c_ideal, c_nadir) for e in data] - - # check if the movement of all points is significant - if self.all_to_current: - c_N = normalize(c_F, c_ideal, c_nadir) - if self.perf_indicator == "igd": - delta_f = [IGD(c_N).do(N[k]) for k in range(len(N))] - elif self.perf_indicator == "hv": - hv = Hypervolume(ref_point=np.ones(c_F.shape[1])) - delta_f = [hv.do(N[k]) for k in range(len(N))] - else: - delta_f = [IGD(N[k + 1]).do(N[k]) for k in range(len(N) - 1)] - - ret["delta_f"] = delta_f - - return ret - - def _decide(self, metrics): - delta_ideal = [e["delta_ideal"] for e in metrics] - delta_nadir = [e["delta_nadir"] for e in metrics] - delta_f = [max(e["delta_f"]) for e in metrics] - return max(max(delta_ideal), max(delta_nadir), max(delta_f)) > self.tol diff --git a/pymoo/util/termination/f_tol_single.py b/pymoo/util/termination/f_tol_single.py deleted file mode 100644 index c9e1c3d1b..000000000 --- a/pymoo/util/termination/f_tol_single.py +++ /dev/null @@ -1,32 +0,0 @@ -from pymoo.util.misc import to_numpy -from pymoo.util.termination.sliding_window_termination import SlidingWindowTermination - - -class SingleObjectiveSpaceToleranceTermination(SlidingWindowTermination): - - def __init__(self, - tol=1e-6, - n_last=20, - nth_gen=1, - n_max_gen=None, - n_max_evals=None, - **kwargs) -> None: - super().__init__(metric_window_size=n_last, - data_window_size=2, - min_data_for_metric=2, - nth_gen=nth_gen, - n_max_gen=n_max_gen, - n_max_evals=n_max_evals, - **kwargs) - self.tol = tol - - def _store(self, algorithm): - return algorithm.opt.get("F").min() - - def _metric(self, data): - last, current = data[-2], data[-1] - return last - current - - def _decide(self, metrics): - delta_f = to_numpy(metrics) - return delta_f.max() > self.tol diff --git a/pymoo/util/termination/max_eval.py b/pymoo/util/termination/max_eval.py deleted file mode 100644 index 948cf6eb5..000000000 --- a/pymoo/util/termination/max_eval.py +++ /dev/null @@ -1,15 +0,0 @@ -from pymoo.core.termination import Termination - - -class MaximumFunctionCallTermination(Termination): - - def __init__(self, n_max_evals) -> None: - super().__init__() - self.n_max_evals = n_max_evals - - if self.n_max_evals is None: - self.n_max_evals = 
float("inf") - - def _do_continue(self, algorithm): - return algorithm.evaluator.n_eval < self.n_max_evals - diff --git a/pymoo/util/termination/max_gen.py b/pymoo/util/termination/max_gen.py deleted file mode 100644 index 96834cf52..000000000 --- a/pymoo/util/termination/max_gen.py +++ /dev/null @@ -1,15 +0,0 @@ -from pymoo.core.termination import Termination - - -class MaximumGenerationTermination(Termination): - - def __init__(self, n_max_gen) -> None: - super().__init__() - self.n_max_gen = n_max_gen - - if self.n_max_gen is None: - self.n_max_gen = float("inf") - - def _do_continue(self, algorithm): - return algorithm.n_gen < self.n_max_gen - diff --git a/pymoo/util/termination/min_igd.py b/pymoo/util/termination/min_igd.py deleted file mode 100644 index 20ff98285..000000000 --- a/pymoo/util/termination/min_igd.py +++ /dev/null @@ -1,18 +0,0 @@ -from pymoo.core.termination import Termination -from pymoo.indicators.igd import IGD - - -class IGDTermination(Termination): - - def __init__(self, min_igd, pf) -> None: - super().__init__() - if pf is None: - raise Exception("You can only use IGD termination criteria if the pareto front is known!") - - self.perf = IGD(pf) - self.min_igd = min_igd - - def _do_continue(self, algorithm): - F = algorithm.opt.get("F") - return self.perf.do(F) > self.min_igd - diff --git a/pymoo/util/termination/no_termination.py b/pymoo/util/termination/no_termination.py deleted file mode 100644 index 5bd75e0ec..000000000 --- a/pymoo/util/termination/no_termination.py +++ /dev/null @@ -1,7 +0,0 @@ -from pymoo.core.termination import Termination - - -class NoTermination(Termination): - - def _do_continue(self, algorithm, **kwargs): - return True diff --git a/pymoo/util/termination/sliding_window_termination.py b/pymoo/util/termination/sliding_window_termination.py deleted file mode 100644 index 6c53b314f..000000000 --- a/pymoo/util/termination/sliding_window_termination.py +++ /dev/null @@ -1,99 +0,0 @@ -from abc import abstractmethod - -from pymoo.util.sliding_window import SlidingWindow -from pymoo.util.termination.collection import TerminationCollection -from pymoo.util.termination.max_eval import MaximumFunctionCallTermination -from pymoo.util.termination.max_gen import MaximumGenerationTermination - - -class SlidingWindowTermination(TerminationCollection): - - def __init__(self, - metric_window_size=None, - data_window_size=None, - min_data_for_metric=1, - nth_gen=1, - n_max_gen=None, - n_max_evals=None, - truncate_metrics=True, - truncate_data=True, - ) -> None: - """ - - Parameters - ---------- - - metric_window_size : int - The last generations that should be considering during the calculations - - data_window_size : int - How much of the history should be kept in memory based on a sliding window. 
- - nth_gen : int - Each n-th generation the termination should be checked for - - """ - - super().__init__(MaximumGenerationTermination(n_max_gen=n_max_gen), - MaximumFunctionCallTermination(n_max_evals=n_max_evals)) - - # the window sizes stored in objects - self.data_window_size = data_window_size - self.metric_window_size = metric_window_size - - # the obtained data at each iteration - self.data = SlidingWindow(data_window_size) if truncate_data else [] - - # the metrics calculated also in a sliding window - self.metrics = SlidingWindow(metric_window_size) if truncate_metrics else [] - - # each n-th generation the termination decides whether to terminate or not - self.nth_gen = nth_gen - - # number of problems of data need to be stored to calculate the metric at all - self.min_data_for_metric = min_data_for_metric - - def _do_continue(self, algorithm): - - # if the maximum generation or maximum evaluations say terminated -> do so - if not super()._do_continue(algorithm): - return False - - # store the data decided to be used by the implementation - obj = self._store(algorithm) - if obj is not None: - self.data.append(obj) - - # if enough data has be stored to calculate the metric - if len(self.data) >= self.min_data_for_metric: - metric = self._metric(self.data[-self.data_window_size:]) - if metric is not None: - self.metrics.append(metric) - - # if its the n-th generation and enough metrics have been calculated make the decision - if algorithm.n_gen % self.nth_gen == 0 and len(self.metrics) >= self.metric_window_size: - - # ask the implementation whether to terminate or not - return self._decide(self.metrics[-self.metric_window_size:]) - - # otherwise by default just continue - else: - return True - - # given an algorithm object decide what should be stored as historical information - by default just opt - def _store(self, algorithm): - return algorithm.opt - - @abstractmethod - def _decide(self, metrics): - pass - - @abstractmethod - def _metric(self, data): - pass - - def get_metric(self): - if len(self.metrics) > 0: - return self.metrics[-1] - else: - return None diff --git a/pymoo/util/termination/x_tol.py b/pymoo/util/termination/x_tol.py deleted file mode 100644 index d350f6a61..000000000 --- a/pymoo/util/termination/x_tol.py +++ /dev/null @@ -1,42 +0,0 @@ -import numpy as np - -from pymoo.indicators.igd import IGD -from pymoo.util.misc import to_numpy -from pymoo.util.normalization import normalize -from pymoo.util.termination.sliding_window_termination import SlidingWindowTermination - - -class DesignSpaceToleranceTermination(SlidingWindowTermination): - - def __init__(self, - n_last=20, - tol=1e-6, - nth_gen=1, - n_max_gen=None, - n_max_evals=None, - **kwargs): - - super().__init__(metric_window_size=n_last, - data_window_size=2, - min_data_for_metric=2, - nth_gen=nth_gen, - n_max_gen=n_max_gen, - n_max_evals=n_max_evals, - **kwargs) - self.tol = tol - - def _store(self, algorithm): - problem = algorithm.problem - X = algorithm.opt.get("X") - - if X.dtype != object: - if problem.xl is not None and problem.xu is not None: - X = normalize(X, xl=problem.xl, xu=problem.xu) - return X - - def _metric(self, data): - last, current = data[-2], data[-1] - return IGD(current).do(last) - - def _decide(self, metrics): - return to_numpy(metrics).mean() > self.tol
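
The refactored criteria above no longer answer a boolean `do_continue`; each one implements `_update(algorithm)` and returns a progress value, with 1.0 meaning the criterion is satisfied. The `Termination` base class in `pymoo.core.termination` is not part of this patch, so the exact dispatch is an assumption here. A minimal sketch of a custom criterion under that convention, using a hypothetical threshold-based rule:

```python
from pymoo.core.termination import Termination


class BestBelowThreshold(Termination):
    """Hypothetical criterion: report full progress once the best objective
    value has dropped below a user-given threshold (sketch only)."""

    def __init__(self, threshold, f_start):
        super().__init__()
        self.threshold = threshold  # objective value at which to stop
        self.f_start = f_start      # reference value used to scale progress

    def _update(self, algorithm):
        f_best = algorithm.opt.get("F").min()
        # linear progress from the reference value towards the threshold,
        # clipped to [0, 1] so that 1.0 signals termination
        progress = (self.f_start - f_best) / (self.f_start - self.threshold)
        return min(1.0, max(0.0, progress))
```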
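
`DeltaToleranceTermination` turns the gap between the current delta and the tolerance into such a progress value. Below is a small stand-alone sketch of that mapping — it mirrors the `_update` logic in the patch but is not part of it — which is handy for sanity-checking what a given delta reports:

```python
import math


def progress_from_delta(delta, tol=1e-6, log=True):
    # mirrors DeltaToleranceTermination._update: at or below the tolerance the
    # criterion reports 1.0; above it, progress decays with the (optionally
    # log-scaled) distance between delta and tol
    if delta <= tol:
        return 1.0
    if log:
        tol, delta = math.log(tol), math.log(delta)
    return 1 / (1 + (delta - tol))


# a delta two orders of magnitude above tol=1e-6 reports roughly
# 1 / (1 + ln(1e-4) - ln(1e-6)) ≈ 0.18
print(progress_from_delta(1e-4))
```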
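
`DefaultSingleObjectiveTermination` and `DefaultMultiObjectiveTermination` combine the design-space, constraint-violation and objective-space criteria as `min(cv, max(x, f))`, so a run ends once the constraint criterion has converged and either the design space or the objective space has stopped moving. A usage sketch, assuming the new `pymoo.termination` package path is importable as laid out in this patch and that `minimize`, `get_problem` and `NSGA2` keep their released interfaces:

```python
from pymoo.algorithms.moo.nsga2 import NSGA2
from pymoo.optimize import minimize
from pymoo.problems import get_problem
from pymoo.termination.default import DefaultMultiObjectiveTermination

problem = get_problem("zdt1")
algorithm = NSGA2(pop_size=100)

# robustified x/cv/f criteria with the tolerances hard-wired in default.py
termination = DefaultMultiObjectiveTermination(n_skip=5)

res = minimize(problem, algorithm, termination, seed=1, verbose=False)
print(res.F.shape)
```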
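
`RobustTermination` guards any single-generation criterion against noise by keeping the last `n` progress values in a sliding window and reporting their minimum, so the wrapped criterion has to report 1.0 for `n` consecutive updates before the run stops. A usage sketch under the same import-path assumption:

```python
from pymoo.termination.robust import RobustTermination
from pymoo.termination.xtol import DesignSpaceTermination

# stop only after the (normalized) movement of the optima in design space
# has stayed below 0.01 for 20 updates in a row
termination = RobustTermination(DesignSpaceTermination(tol=0.01), n=20)
```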
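
`IndicatorTermination` drives termination off any performance indicator with a `do(F)` interface. The sketch below uses pymoo's hypervolume indicator; the indicator construction is an assumption, and `log=False` is chosen so an initial value of zero cannot hit the log:

```python
import numpy as np

from pymoo.indicators.hv import Hypervolume
from pymoo.termination.indicator import IndicatorTermination

# stop once the hypervolume of the current optima (to be maximized)
# reaches 0.45 with respect to the reference point (1.1, 1.1)
termination = IndicatorTermination(
    indicator=Hypervolume(ref_point=np.array([1.1, 1.1])),
    threshold=0.45,
    goal="maximize",
    log=False,
)
```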
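
The budget-style criteria (`MaximumGenerationTermination`, `MaximumFunctionCallTermination`, `TimeBasedTermination`) now report a plain ratio of the consumed budget, and `TerminationCollection` reports the minimum over its members, i.e. it only reaches 1.0 once every member has. A small sketch; the integer passed to `TimeBasedTermination` is assumed to be seconds, consistent with the `time.time()` arithmetic in the `max_time` hunk above:

```python
from pymoo.termination.collection import TerminationCollection
from pymoo.termination.max_eval import MaximumFunctionCallTermination
from pymoo.termination.max_gen import MaximumGenerationTermination
from pymoo.termination.max_time import TimeBasedTermination

# each member reports consumed-budget / budget; the collection's progress is
# the smallest of these, so it signals termination only after all three
# budgets have been used up
termination = TerminationCollection(
    MaximumGenerationTermination(n_max_gen=200),
    MaximumFunctionCallTermination(n_max_evals=20_000),
    TimeBasedTermination(300),  # 300 seconds of wall time
)
```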