104 changes: 91 additions & 13 deletions libensemble/gen_classes/aposmm.py
@@ -1,4 +1,5 @@
import copy
from math import gamma, pi, sqrt
from typing import List

import numpy as np
@@ -11,38 +12,115 @@

class APOSMM(PersistentGenInterfacer):
"""
Standalone object-oriented APOSMM generator
APOSMM coordinates multiple local optimization runs, dramatically reducing the time
needed to discover multiple minima on parallel systems.

This *generator* adheres to the `Generator Standard <https://github.com/campa-consortium/generator_standard>`_.

.. seealso::

`https://doi.org/10.1007/s12532-017-0131-4 <https://doi.org/10.1007/s12532-017-0131-4>`_

Parameters
----------
vocs: VOCS
The VOCS object, adhering to the VOCS interface from the Generator Standard.

History: npt.NDArray = []
An optional history of previously evaluated points.

initial_sample_size: int = 100
Number of uniformly sampled points to evaluate before starting the localopt runs.
May be zero if no additional sampling is desired, but then previously evaluated
points must be provided in the History.

sample_points: npt.NDArray = None
Points to be sampled (original domain).
If more sample points are needed by APOSMM during the course of the
optimization, points will be drawn uniformly over the domain.

localopt_method: str = "LN_BOBYQA"
The local optimization method to use.

rk_const: float = None
Multiplier in front of the ``r_k`` value.
If not provided, it will be set to ``0.5 * ((gamma(1 + (n / 2)) * 5) ** (1 / n)) / sqrt(pi)``

xtol_abs: float = 1e-6
Localopt method's absolute tolerance on the parameters (convergence criterion).

ftol_abs: float = 1e-6
Localopt method's absolute tolerance on the objective value (convergence criterion).

dist_to_bound_multiple: float = 0.5
Fraction of the distance to the nearest boundary to use as the initial
step size in localopt runs.

max_active_runs: int = 6
Maximum number of local optimization runs that APOSMM advances at once.

random_seed: int = 1
Seed for the random number generator.
"""

def __init__(
self,
vocs: VOCS,
History: npt.NDArray = [],
persis_info: dict = {},
gen_specs: dict = {},
libE_info: dict = {},
initial_sample_size: int = 100,
sample_points: npt.NDArray = None,
localopt_method: str = "LN_BOBYQA",
rk_const: float = None,
xtol_abs: float = 1e-6,
ftol_abs: float = 1e-6,
dist_to_bound_multiple: float = 0.5,
max_active_runs: int = 6,
random_seed: int = 1,
**kwargs,
) -> None:

from libensemble.gen_funcs.persistent_aposmm import aposmm

self.VOCS = vocs

gen_specs = {}
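# Random number generator for the gen, seeded from random_seed for reproducibility.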
persis_info = {"1": np.random.default_rng(random_seed)}
libE_info = {}
gen_specs["gen_f"] = aposmm
self.n = len(list(self.VOCS.variables.keys()))

if not rk_const:
rk_const = 0.5 * ((gamma(1 + (self.n / 2)) * 5) ** (1 / self.n)) / sqrt(pi)

gen_specs["user"] = {}
gen_specs["user"]["lb"] = np.array([vocs.variables[i].domain[0] for i in vocs.variables])
gen_specs["user"]["ub"] = np.array([vocs.variables[i].domain[1] for i in vocs.variables])

if not gen_specs.get("out"):  # gen_specs output fields rarely change for APOSMM, even as the problem varies
gen_specs["out"] = [
("x", float, self.n),
("x_on_cube", float, self.n),
("sim_id", int),
("local_min", bool),
("local_pt", bool),
]
gen_specs["persis_in"] = ["x", "f", "local_pt", "sim_id", "sim_ended", "x_on_cube", "local_min"]
FIELDS = [
"initial_sample_size",
"sample_points",
"localopt_method",
"rk_const",
"xtol_abs",
"ftol_abs",
"dist_to_bound_multiple",
"max_active_runs",
]

for k in FIELDS:
val = locals().get(k)
if val is not None:
gen_specs["user"][k] = val

gen_specs["out"] = [
("x", float, self.n),
("x_on_cube", float, self.n),
("sim_id", int),
("local_min", bool),
("local_pt", bool),
]
gen_specs["persis_in"] = ["x", "f", "local_pt", "sim_id", "sim_ended", "x_on_cube", "local_min"]
super().__init__(vocs, History, persis_info, gen_specs, libE_info, **kwargs)

if not self.persis_info.get("nworkers"):
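For context, a minimal usage sketch of the new interface, mirroring the unit-test changes below. The VOCS import path and constructor keywords are assumptions (the test's VOCS setup is in a collapsed region), and `energy` is a stand-in objective; the tests use `six_hump_camel_func`:

```python
import numpy as np

from libensemble.gen_classes import APOSMM

# Assumed import path -- VOCS is defined by the Generator Standard, not by libEnsemble itself.
from generator_standard.vocs import VOCS


# Stand-in objective for illustration only.
def energy(x):
    return float(np.sum(x**2))


# Assumed VOCS construction from the variables/objectives dicts used in the test.
vocs = VOCS(variables={"core": [-3, 3], "edge": [-2, 2]}, objectives={"energy": "MINIMIZE"})

gen = APOSMM(vocs, initial_sample_size=100, max_active_runs=6)

total_evals, eval_max = 0, 2000

# Evaluate the initial uniform sample.
sample = gen.suggest(100)
for point in sample:
    point["energy"] = energy(np.array([point["core"], point["edge"]]))
    total_evals += 1
gen.ingest(sample)

# Alternate suggest/ingest until the evaluation budget is spent, collecting detected minima.
found_minima = []
while total_evals < eval_max:
    batch = gen.suggest(6)
    found_minima.extend(gen.suggest_updates())
    for point in batch:
        point["energy"] = energy(np.array([point["core"], point["edge"]]))
        total_evals += 1
    gen.ingest(batch)

H, persis_info, exit_code = gen.finalize()
```

The `_evaluate_aposmm_instance` helper added below runs this same loop against the six-hump camel function and checks that all six known minima are recovered.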
91 changes: 50 additions & 41 deletions libensemble/tests/unit_tests/test_persistent_aposmm.py
@@ -122,6 +122,52 @@ def test_standalone_persistent_aposmm():
assert min_found >= 6, f"Found {min_found} minima"


def _evaluate_aposmm_instance(my_APOSMM):
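# Drives an APOSMM instance through the suggest/ingest loop on the six-hump camel
# function and asserts that all six known minima are identified.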
from libensemble.message_numbers import FINISHED_PERSISTENT_GEN_TAG
from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func
from libensemble.tests.regression_tests.support import six_hump_camel_minima as minima

initial_sample = my_APOSMM.suggest(100)

total_evals = 0
eval_max = 2000

for point in initial_sample:
point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]]))
total_evals += 1

my_APOSMM.ingest(initial_sample)

potential_minima = []

while total_evals < eval_max:

sample, detected_minima = my_APOSMM.suggest(6), my_APOSMM.suggest_updates()
if len(detected_minima):
for m in detected_minima:
potential_minima.append(m)
for point in sample:
point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]]))
total_evals += 1
my_APOSMM.ingest(sample)
H, persis_info, exit_code = my_APOSMM.finalize()

assert exit_code == FINISHED_PERSISTENT_GEN_TAG, "Standalone persistent_aposmm didn't exit correctly"
assert persis_info.get("run_order"), "Standalone persistent_aposmm didn't do any localopt runs"

assert len(potential_minima) >= 6, f"Found {len(potential_minima)} minima"

tol = 1e-3
min_found = 0
for m in minima:
# The minima of this test problem are known;
# we use them to check that APOSMM has identified each one.
print(np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)), flush=True)
if np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)) < tol:
min_found += 1
assert min_found >= 6, f"Found {min_found} minima"


@pytest.mark.extra
def test_standalone_persistent_aposmm_combined_func():
from math import gamma, pi, sqrt
@@ -176,14 +222,11 @@ def test_asktell_with_persistent_aposmm():

import libensemble.gen_funcs
from libensemble.gen_classes import APOSMM
from libensemble.message_numbers import FINISHED_PERSISTENT_GEN_TAG
from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func
from libensemble.tests.regression_tests.support import six_hump_camel_minima as minima

libensemble.gen_funcs.rc.aposmm_optimizers = "nlopt"

n = 2
eval_max = 2000

variables = {"core": [-3, 3], "edge": [-2, 2]}
objectives = {"energy": "MINIMIZE"}
@@ -202,45 +245,11 @@ def test_asktell_with_persistent_aposmm():
max_active_runs=6,
)

initial_sample = my_APOSMM.suggest(100)

total_evals = 0
eval_max = 2000

for point in initial_sample:
point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]]))
total_evals += 1

my_APOSMM.ingest(initial_sample)

potential_minima = []
_evaluate_aposmm_instance(my_APOSMM)

while total_evals < eval_max:

sample, detected_minima = my_APOSMM.suggest(6), my_APOSMM.suggest_updates()
if len(detected_minima):
for m in detected_minima:
potential_minima.append(m)
for point in sample:
point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]]))
total_evals += 1
my_APOSMM.ingest(sample)
H, persis_info, exit_code = my_APOSMM.finalize()

assert exit_code == FINISHED_PERSISTENT_GEN_TAG, "Standalone persistent_aposmm didn't exit correctly"
assert persis_info.get("run_order"), "Standalone persistent_aposmm didn't do any localopt runs"

assert len(potential_minima) >= 6, f"Found {len(potential_minima)} minima"

tol = 1e-3
min_found = 0
for m in minima:
# The minima are known on this test problem.
# We use their values to test APOSMM has identified all minima
print(np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)), flush=True)
if np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)) < tol:
min_found += 1
assert min_found >= 6, f"Found {min_found} minima"
# Test initializing and using APOSMM with default parameters:
my_APOSMM = APOSMM(vocs)
_evaluate_aposmm_instance(my_APOSMM)


if __name__ == "__main__":