Skip to content

Commit 59f68e8

Browse files
author
Julian Blank
committed
Pattern Search and Riesz s-Energy Reference Directions
1 parent: f6d6931 · commit: 59f68e8

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

43 files changed

+1927
-387
lines changed

pymoo/algorithms/genetic_algorithm.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ def _initialize(self):
7777
if self.survival:
7878
pop = self.survival.do(self.problem, pop, len(pop), algorithm=self)
7979

80-
self.pop = pop
80+
self.pop, self.off = pop, pop
8181

8282
def _next(self):
8383

pymoo/algorithms/so_adam.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@
55

66
class Adam(GradientBasedAlgorithm):
77

8-
def __init__(self, X,
8+
def __init__(self,
9+
X,
910
alpha=0.005,
1011
beta_1=0.9,
1112
beta_2=0.999,

pymoo/algorithms/so_cmaes.py

Lines changed: 30 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
import numpy as np
22

3+
from pymoo.algorithms.so_local_search import LocalSearch
34
from pymoo.docs import parse_doc_string
4-
from pymoo.model.algorithm import Algorithm, filter_optimum
5+
from pymoo.model.algorithm import filter_optimum
56
from pymoo.model.population import Population
6-
from pymoo.operators.sampling.random_sampling import FloatRandomSampling
77
from pymoo.util.display import Display
88
from pymoo.util.termination.max_eval import MaximumFunctionCallTermination
99
from pymoo.util.termination.max_gen import MaximumGenerationTermination
@@ -47,10 +47,9 @@ def _do(self, problem, evaluator, algorithm):
4747
self.output.append("axis", axis, width=8)
4848

4949

50-
class CMAES(Algorithm):
50+
class CMAES(LocalSearch):
5151

5252
def __init__(self,
53-
x0=None,
5453
sigma=0.5,
5554
parallelize=True,
5655
maxfevals=np.inf,
@@ -339,7 +338,6 @@ def __init__(self,
339338
self.es = None
340339
self.cma = None
341340

342-
self.x0 = x0
343341
self.sigma = sigma
344342
self.restarts = restarts
345343
self.restart_from_best = restart_from_best
@@ -362,6 +360,7 @@ def __init__(self,
362360
self.default_termination = NoTermination()
363361
self.send_array_to_yield = True
364362
self.parallelize = parallelize
363+
self.al = None
365364

366365
def initialize(self, problem, seed=None, **kwargs):
367366
super().initialize(problem, **kwargs)
@@ -378,13 +377,18 @@ def initialize(self, problem, seed=None, **kwargs):
378377
elif isinstance(self.termination, MaximumFunctionCallTermination):
379378
self.options['maxfevals'] = self.termination.n_max_evals
380379

381-
if self.x0 is None:
382-
np.random.seed(seed)
383-
self.x0 = FloatRandomSampling().do(problem, 1).get("X")[0]
380+
# if self.problem.n_constr > 0:
381+
# _al = AugmentedLagrangian(problem.n_var)
382+
# _al.set_m(problem.n_constr)
383+
# _al._equality = np.full(problem.n_constr, False)
384+
# self.al = _al
385+
# kwargs.setdefault('options', {}).setdefault('tolstagnation', 0)
384386

385-
self.es = my_fmin(
386-
self.x0,
387-
self.sigma,
387+
def _initialize(self):
388+
super()._initialize()
389+
self.pop = Population()
390+
391+
kwargs = dict(
388392
options=self.options,
389393
parallelize=self.parallelize,
390394
restarts=self.restarts,
@@ -394,19 +398,30 @@ def initialize(self, problem, seed=None, **kwargs):
394398
noise_handler=self.noise_handler,
395399
noise_change_sigma_exponent=self.noise_change_sigma_exponent,
396400
noise_kappa_exponent=self.noise_kappa_exponent,
397-
bipop=self.bipop
398-
)
401+
bipop=self.bipop)
399402

400-
def _initialize(self):
403+
self.es = my_fmin(self.x0.X, self.sigma, **kwargs)
401404
self._next()
402405

403406
def _next(self):
404407

405-
if self.pop is None:
408+
if self.pop is None or len(self.pop) == 0:
406409
X = next(self.es)
407410

408411
else:
409412
F = self.pop.get("F")[:, 0].tolist()
413+
#
414+
# if self.problem.n_constr > 0:
415+
# G = self.pop.get("G").tolist()
416+
# self.al.set_coefficients(F, G)
417+
#
418+
# x = self.es.gi_frame.f_locals["es"].ask(1, sigma_fac=0)[0]
419+
# ind = Individual(X=x)
420+
# self.evaluator.eval(self.problem, ind, algorithm=self)
421+
# self.al.update(ind.F[0], ind.G)
422+
#
423+
# F = F + sum(self.al(G))
424+
410425
if not self.send_array_to_yield:
411426
F = F[0]
412427

pymoo/algorithms/so_de.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
from pymoo.algorithms.genetic_algorithm import GeneticAlgorithm
44
from pymoo.docs import parse_doc_string
5+
from pymoo.model.replacement import ImprovementReplacement
56
from pymoo.operators.crossover.biased_crossover import BiasedCrossover
67
from pymoo.operators.crossover.differental_evolution_crossover import DifferentialEvolutionCrossover
78
from pymoo.operators.crossover.exponential_crossover import ExponentialCrossover
@@ -123,17 +124,8 @@ def _next(self):
123124
# evaluate the results
124125
self.evaluator.eval(self.problem, self.off, algorithm=self)
125126

126-
_F, _CV, _feasible = self.off.get("F", "CV", "feasible")
127-
_F = parameter_less(_F, _CV)
128-
129-
# find the individuals which are indeed better
130-
is_better = np.where((_F <= F)[:, 0])[0]
131-
132-
# replace the individuals in the population
133-
pop[is_better] = self.off[is_better]
134-
135-
# store the population in the algorithm object
136-
self.pop = pop
127+
# replace the individuals that have improved
128+
self.pop = ImprovementReplacement().do(self.problem, self.pop, self.off)
137129

138130

139131
parse_doc_string(DE.__init__)
Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
from pymoo.model.algorithm import Algorithm
2+
from pymoo.model.population import pop_from_array_or_individual
3+
from pymoo.operators.sampling.latin_hypercube_sampling import LatinHypercubeSampling
4+
5+
6+
class LocalSearch(Algorithm):
7+
8+
def __init__(self,
9+
x0=None,
10+
sampling=LatinHypercubeSampling(),
11+
n_sample_points="auto",
12+
**kwargs):
13+
14+
super().__init__(**kwargs)
15+
self.x0 = x0
16+
self.sampling = sampling
17+
self.n_sample_points = n_sample_points
18+
19+
def initialize(self, problem, **kwargs):
20+
super().initialize(problem, **kwargs)
21+
22+
if self.n_sample_points == "auto":
23+
self.n_sample_points = self.problem.n_var * 5
24+
25+
def _initialize(self, **kwargs):
26+
super()._initialize(**kwargs)
27+
28+
# no initial point is provided - sample in bounds and take the best
29+
if self.x0 is None:
30+
if not self.problem.has_bounds():
31+
raise Exception("Either provide an x0 or a problem with variable bounds!")
32+
33+
self.pop = self.sampling.do(self.problem, self.n_sample_points)
34+
else:
35+
self.pop = pop_from_array_or_individual(self.x0)
36+
37+
self.evaluator.eval(self.problem, self.pop, algorithm=self)
38+
self._set_optimum()
39+
self.x0 = self.opt[0]

0 commit comments

Comments
 (0)