From c8d6a82b447a30d18d91c7df55297de868498925 Mon Sep 17 00:00:00 2001 From: jlnav Date: Thu, 21 Aug 2025 15:48:36 -0500 Subject: [PATCH 1/6] starting to populate the APOSMM class with common kwargs, for better documenting, and so we don't have to check the existence of settings in kwargs --- libensemble/gen_classes/aposmm.py | 29 ++++++++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/libensemble/gen_classes/aposmm.py b/libensemble/gen_classes/aposmm.py index 45a522279..60122a1d4 100644 --- a/libensemble/gen_classes/aposmm.py +++ b/libensemble/gen_classes/aposmm.py @@ -1,4 +1,5 @@ import copy +from math import gamma, pi, sqrt from typing import List import numpy as np @@ -18,22 +19,44 @@ def __init__( self, vocs: VOCS, History: npt.NDArray = [], - persis_info: dict = {}, - gen_specs: dict = {}, - libE_info: dict = {}, + initial_sample_size: int = 100, + sample_points: npt.NDArray = None, + localopt_method: str = "LN_BOBYQA", + rk_const: float = None, + xtol_abs: float = 1e-6, + ftol_abs: float = 1e-6, + dist_to_bound_multiple: float = 0.5, + max_active_runs: int = 6, **kwargs, ) -> None: + from libensemble.gen_funcs.persistent_aposmm import aposmm self.VOCS = vocs + gen_specs = {} + persis_info = {} + libE_info = {} gen_specs["gen_f"] = aposmm self.n = len(list(self.VOCS.variables.keys())) + if not rk_const: + rk_const = 0.5 * ((gamma(1 + (self.n / 2)) * 5) ** (1 / self.n)) / sqrt(pi) + gen_specs["user"] = {} gen_specs["user"]["lb"] = np.array([vocs.variables[i].domain[0] for i in vocs.variables]) gen_specs["user"]["ub"] = np.array([vocs.variables[i].domain[1] for i in vocs.variables]) + gen_specs["user"]["initial_sample_size"] = initial_sample_size + if sample_points: + gen_specs["user"]["sample_points"] = sample_points + gen_specs["user"]["localopt_method"] = localopt_method + gen_specs["user"]["rk_const"] = rk_const + gen_specs["user"]["xtol_abs"] = xtol_abs + gen_specs["user"]["ftol_abs"] = ftol_abs + 
gen_specs["user"]["dist_to_bound_multiple"] = dist_to_bound_multiple + gen_specs["user"]["max_active_runs"] = max_active_runs + if not gen_specs.get("out"): # gen_specs never especially changes for aposmm even as the problem varies gen_specs["out"] = [ ("x", float, self.n), From 1dce05935cbf2be9dc2df1498daf1d32153a3b4d Mon Sep 17 00:00:00 2001 From: jlnav Date: Fri, 22 Aug 2025 07:49:11 -0500 Subject: [PATCH 2/6] small fixes --- libensemble/gen_classes/aposmm.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/libensemble/gen_classes/aposmm.py b/libensemble/gen_classes/aposmm.py index 60122a1d4..d5597ecf6 100644 --- a/libensemble/gen_classes/aposmm.py +++ b/libensemble/gen_classes/aposmm.py @@ -27,6 +27,7 @@ def __init__( ftol_abs: float = 1e-6, dist_to_bound_multiple: float = 0.5, max_active_runs: int = 6, + random_seed: int = 1, **kwargs, ) -> None: @@ -35,7 +36,7 @@ def __init__( self.VOCS = vocs gen_specs = {} - persis_info = {} + persis_info = {"1": np.random.default_rng(random_seed)} libE_info = {} gen_specs["gen_f"] = aposmm self.n = len(list(self.VOCS.variables.keys())) @@ -48,7 +49,7 @@ def __init__( gen_specs["user"]["ub"] = np.array([vocs.variables[i].domain[1] for i in vocs.variables]) gen_specs["user"]["initial_sample_size"] = initial_sample_size - if sample_points: + if sample_points is not None: gen_specs["user"]["sample_points"] = sample_points gen_specs["user"]["localopt_method"] = localopt_method gen_specs["user"]["rk_const"] = rk_const From 77845a0a202b6f6350eeaba996374746e4572109 Mon Sep 17 00:00:00 2001 From: jlnav Date: Fri, 22 Aug 2025 08:08:34 -0500 Subject: [PATCH 3/6] docstring for APOSMM class --- libensemble/gen_classes/aposmm.py | 51 ++++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/libensemble/gen_classes/aposmm.py b/libensemble/gen_classes/aposmm.py index d5597ecf6..ac792c31c 100644 --- a/libensemble/gen_classes/aposmm.py +++ b/libensemble/gen_classes/aposmm.py @@ -12,7 
+12,56 @@ class APOSMM(PersistentGenInterfacer): """ - Standalone object-oriented APOSMM generator + APOSMM coordinates multiple local optimization runs, dramatically reducing time for + discovering multiple minima on parallel systems. + + This *generator* adheres to the `Generator Standard <https://github.com/campa-consortium/generator_standard>`_. + + .. seealso:: + + `https://doi.org/10.1007/s12532-017-0131-4 <https://doi.org/10.1007/s12532-017-0131-4>`_ + + Parameters + ---------- + vocs: VOCS + The VOCS object, adhering to the VOCS interface from the Generator Standard. + + History: npt.NDArray = [] + An optional history of previously evaluated points. + + initial_sample_size: int = 100 + Number of uniformly sampled points + to be evaluated before starting the localopt runs. Can be + zero if no additional sampling is desired, but if zero there must be past values + provided in the History. + + sample_points: npt.NDArray = None + Points to be sampled (original domain). + If more sample points are needed by APOSMM during the course of the + optimization, points will be drawn uniformly over the domain. + + localopt_method: str = "LN_BOBYQA" + The local optimization method to use. + + rk_const: float = None + Multiplier in front of the ``r_k`` value. + If not provided, it will be set to ``0.5 * ((gamma(1 + (n / 2)) * 5) ** (1 / n)) / sqrt(pi)`` + + xtol_abs: float = 1e-6 + Localopt method's absolute convergence tolerance on the optimization parameters. + + ftol_abs: float = 1e-6 + Localopt method's absolute convergence tolerance on the objective function value. + + dist_to_bound_multiple: float = 0.5 + What fraction of the distance to the nearest boundary should the initial + step size be in localopt runs. + + max_active_runs: int = 6 + Bound on number of runs APOSMM is advancing. + + random_seed: int = 1 + Seed for the random number generator. 
""" def __init__( From 2635236e1dda64b0c244cf94442bbf234d23e880 Mon Sep 17 00:00:00 2001 From: jlnav Date: Fri, 22 Aug 2025 13:37:57 -0500 Subject: [PATCH 4/6] evaluate an APOSMM with only VOCS passed in --- .../unit_tests/test_persistent_aposmm.py | 91 ++++++++++--------- 1 file changed, 50 insertions(+), 41 deletions(-) diff --git a/libensemble/tests/unit_tests/test_persistent_aposmm.py b/libensemble/tests/unit_tests/test_persistent_aposmm.py index d04d56198..392cee0d0 100644 --- a/libensemble/tests/unit_tests/test_persistent_aposmm.py +++ b/libensemble/tests/unit_tests/test_persistent_aposmm.py @@ -122,6 +122,52 @@ def test_standalone_persistent_aposmm(): assert min_found >= 6, f"Found {min_found} minima" +def _evaluate_aposmm_instance(my_APOSMM): + from libensemble.message_numbers import FINISHED_PERSISTENT_GEN_TAG + from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func + from libensemble.tests.regression_tests.support import six_hump_camel_minima as minima + + initial_sample = my_APOSMM.suggest(100) + + total_evals = 0 + eval_max = 2000 + + for point in initial_sample: + point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]])) + total_evals += 1 + + my_APOSMM.ingest(initial_sample) + + potential_minima = [] + + while total_evals < eval_max: + + sample, detected_minima = my_APOSMM.suggest(6), my_APOSMM.suggest_updates() + if len(detected_minima): + for m in detected_minima: + potential_minima.append(m) + for point in sample: + point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]])) + total_evals += 1 + my_APOSMM.ingest(sample) + H, persis_info, exit_code = my_APOSMM.finalize() + + assert exit_code == FINISHED_PERSISTENT_GEN_TAG, "Standalone persistent_aposmm didn't exit correctly" + assert persis_info.get("run_order"), "Standalone persistent_aposmm didn't do any localopt runs" + + assert len(potential_minima) >= 6, f"Found {len(potential_minima)} minima" + + tol = 1e-3 + min_found = 0 + for m in 
minima: + # The minima are known on this test problem. + # We use their values to test APOSMM has identified all minima + print(np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)), flush=True) + if np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)) < tol: + min_found += 1 + assert min_found >= 6, f"Found {min_found} minima" + + @pytest.mark.extra def test_standalone_persistent_aposmm_combined_func(): from math import gamma, pi, sqrt @@ -176,14 +222,11 @@ def test_asktell_with_persistent_aposmm(): import libensemble.gen_funcs from libensemble.gen_classes import APOSMM - from libensemble.message_numbers import FINISHED_PERSISTENT_GEN_TAG - from libensemble.sim_funcs.six_hump_camel import six_hump_camel_func from libensemble.tests.regression_tests.support import six_hump_camel_minima as minima libensemble.gen_funcs.rc.aposmm_optimizers = "nlopt" n = 2 - eval_max = 2000 variables = {"core": [-3, 3], "edge": [-2, 2]} objectives = {"energy": "MINIMIZE"} @@ -202,45 +245,11 @@ def test_asktell_with_persistent_aposmm(): max_active_runs=6, ) - initial_sample = my_APOSMM.suggest(100) - - total_evals = 0 - eval_max = 2000 - - for point in initial_sample: - point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]])) - total_evals += 1 - - my_APOSMM.ingest(initial_sample) - - potential_minima = [] + _evaluate_aposmm_instance(my_APOSMM) - while total_evals < eval_max: - - sample, detected_minima = my_APOSMM.suggest(6), my_APOSMM.suggest_updates() - if len(detected_minima): - for m in detected_minima: - potential_minima.append(m) - for point in sample: - point["energy"] = six_hump_camel_func(np.array([point["core"], point["edge"]])) - total_evals += 1 - my_APOSMM.ingest(sample) - H, persis_info, exit_code = my_APOSMM.finalize() - - assert exit_code == FINISHED_PERSISTENT_GEN_TAG, "Standalone persistent_aposmm didn't exit correctly" - assert persis_info.get("run_order"), "Standalone persistent_aposmm didn't do any localopt runs" - - assert 
len(potential_minima) >= 6, f"Found {len(potential_minima)} minima" - - tol = 1e-3 - min_found = 0 - for m in minima: - # The minima are known on this test problem. - # We use their values to test APOSMM has identified all minima - print(np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)), flush=True) - if np.min(np.sum((H[H["local_min"]]["x"] - m) ** 2, 1)) < tol: - min_found += 1 - assert min_found >= 6, f"Found {min_found} minima" + # test initializing/using with default parameters: + my_APOSMM = APOSMM(vocs) + _evaluate_aposmm_instance(my_APOSMM) if __name__ == "__main__": From 4e73c93adf8f444d015990ba97d8523130bac0eb Mon Sep 17 00:00:00 2001 From: jlnav Date: Tue, 26 Aug 2025 14:26:27 -0500 Subject: [PATCH 5/6] replace completely-typed out gen_specs['user'] update from parameters with loop over fields and grabbing the value from locals, as suggested by shuds --- libensemble/gen_classes/aposmm.py | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/libensemble/gen_classes/aposmm.py b/libensemble/gen_classes/aposmm.py index ac792c31c..3fa1537ac 100644 --- a/libensemble/gen_classes/aposmm.py +++ b/libensemble/gen_classes/aposmm.py @@ -97,15 +97,23 @@ def __init__( gen_specs["user"]["lb"] = np.array([vocs.variables[i].domain[0] for i in vocs.variables]) gen_specs["user"]["ub"] = np.array([vocs.variables[i].domain[1] for i in vocs.variables]) - gen_specs["user"]["initial_sample_size"] = initial_sample_size if sample_points is not None: gen_specs["user"]["sample_points"] = sample_points - gen_specs["user"]["localopt_method"] = localopt_method - gen_specs["user"]["rk_const"] = rk_const - gen_specs["user"]["xtol_abs"] = xtol_abs - gen_specs["user"]["ftol_abs"] = ftol_abs - gen_specs["user"]["dist_to_bound_multiple"] = dist_to_bound_multiple - gen_specs["user"]["max_active_runs"] = max_active_runs + + FIELDS = [ + "initial_sample_size", + "localopt_method", + "rk_const", + "xtol_abs", + "ftol_abs", + "dist_to_bound_multiple", + 
"max_active_runs", + ] + + for k in FIELDS: + val = locals().get(k) + if val is not None: + gen_specs["user"][k] = val if not gen_specs.get("out"): # gen_specs never especially changes for aposmm even as the problem varies gen_specs["out"] = [ From 4357173fe51320991759cd63cf033a72df446983 Mon Sep 17 00:00:00 2001 From: jlnav Date: Wed, 27 Aug 2025 08:08:30 -0500 Subject: [PATCH 6/6] coverage adjusts --- libensemble/gen_classes/aposmm.py | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/libensemble/gen_classes/aposmm.py b/libensemble/gen_classes/aposmm.py index 3fa1537ac..b92bb3aa2 100644 --- a/libensemble/gen_classes/aposmm.py +++ b/libensemble/gen_classes/aposmm.py @@ -97,11 +97,9 @@ def __init__( gen_specs["user"]["lb"] = np.array([vocs.variables[i].domain[0] for i in vocs.variables]) gen_specs["user"]["ub"] = np.array([vocs.variables[i].domain[1] for i in vocs.variables]) - if sample_points is not None: - gen_specs["user"]["sample_points"] = sample_points - FIELDS = [ "initial_sample_size", + "sample_points", "localopt_method", "rk_const", "xtol_abs", @@ -115,15 +113,14 @@ def __init__( if val is not None: gen_specs["user"][k] = val - if not gen_specs.get("out"): # gen_specs never especially changes for aposmm even as the problem varies - gen_specs["out"] = [ - ("x", float, self.n), - ("x_on_cube", float, self.n), - ("sim_id", int), - ("local_min", bool), - ("local_pt", bool), - ] - gen_specs["persis_in"] = ["x", "f", "local_pt", "sim_id", "sim_ended", "x_on_cube", "local_min"] + gen_specs["out"] = [ + ("x", float, self.n), + ("x_on_cube", float, self.n), + ("sim_id", int), + ("local_min", bool), + ("local_pt", bool), + ] + gen_specs["persis_in"] = ["x", "f", "local_pt", "sim_id", "sim_ended", "x_on_cube", "local_min"] super().__init__(vocs, History, persis_info, gen_specs, libE_info, **kwargs) if not self.persis_info.get("nworkers"):