From fc2df1f5fe55fe53e4792e4dabd6375abaf78675 Mon Sep 17 00:00:00 2001
From: Meganton
Date: Mon, 28 Apr 2025 17:35:01 +0200
Subject: [PATCH 1/6] Enhances fidelity handling in optimization algorithms

Adds `ignore_fidelity` parameter across optimizers for flexible fidelity management
Introduces warnings and error handling for missing or unsupported fidelity/prior cases
Updates grid, random, and Bayesian sampling to respect fidelity settings
Improves test coverage for fidelity and prior requirements

Fixes #118, #119, #120
---
 neps/optimizers/algorithms.py            | 202 ++++++++++++++++++++---
 neps/optimizers/bayesian_optimization.py |  18 +-
 neps/optimizers/random_search.py         |   8 +
 neps/optimizers/utils/grid.py            |   4 +-
 tests/test_state/test_neps_state.py      |  61 +++----
 5 files changed, 232 insertions(+), 61 deletions(-)

diff --git a/neps/optimizers/algorithms.py b/neps/optimizers/algorithms.py
index 6de8f67be..b94af85ac 100644
--- a/neps/optimizers/algorithms.py
+++ b/neps/optimizers/algorithms.py
@@ -7,6 +7,7 @@
 the parameters available. You can pass these functoins to `neps.run()` if you like,
 otherwise you may also refer to them by their string name.
 """
+
 # NOTE: If updating this file with new optimizers, please be aware that
 # the documentation here is what is shown in the `neps.run()` documentation.
 # Heres a checklist:
@@ -16,6 +17,7 @@
 
 from __future__ import annotations
 
+import logging
 from collections.abc import Callable, Mapping, Sequence
 from dataclasses import dataclass, field
 from functools import partial
@@ -42,6 +44,8 @@
 from neps.optimizers.utils.brackets import Bracket
 from neps.space import SearchSpace
 
+logger = logging.getLogger(__name__)
+
 
 def _bo(
     pipeline_space: SearchSpace,
@@ -50,6 +54,7 @@ def _bo(
     use_priors: bool,
     cost_aware: bool | Literal["log"],
     sample_prior_first: bool,
+    ignore_fidelity: bool = False,
     device: torch.device | str | None,
 ) -> BayesianOptimization:
     """Initialise the BO loop.
@@ -70,19 +75,25 @@ def _bo(
             If using `cost`, cost must be provided in the reports of the trials.
 
         sample_prior_first: Whether to sample the default configuration first.
+        ignore_fidelity: Whether to ignore fidelity when sampling.
+            In this case, the max fidelity is always used.
         device: Device to use for the optimization.
 
     Raises:
         ValueError: if initial_design_size < 1
+        ValueError: if fidelity is not None and ignore_fidelity is False
     """
-    if any(pipeline_space.fidelities):
+    if not ignore_fidelity and pipeline_space.fidelity is not None:
         raise ValueError(
-            "Fidelities are not supported for BayesianOptimization."
-            " Please consider setting the fidelity to a constant value."
-            f" Got: {pipeline_space.fidelities}"
+            "Fidelities are not supported for BayesianOptimization. Consider setting the"
+            " fidelity to a constant value or ignoring it using ignore_fidelity to"
+            f" always sample at max fidelity. Got fidelity: {pipeline_space.fidelities} "
         )
-    parameters = pipeline_space.searchables
+
+    if ignore_fidelity:
+        parameters = {**pipeline_space.searchables, **pipeline_space.fidelities}
+    else:
+        parameters = {**pipeline_space.searchables}
 
     match initial_design_size:
         case "ndim":
@@ -181,11 +192,16 @@ def _bracket_optimizer(  # noqa: C901, PLR0912, PLR0915
         sample_prior_first: Whether to sample the prior configuration first.
         device: If using Bayesian Optimization, the device to use for the optimization.
""" - assert pipeline_space.fidelity is not None - fidelity_name, fidelity = pipeline_space.fidelity + if pipeline_space.fidelity is not None: + fidelity_name, fidelity = pipeline_space.fidelity + else: + raise ValueError( + "Fidelity is required for bracket optimizers like" + f" {bracket_type if sampler != 'priorband' else 'priorband'}." + ) parameters = pipeline_space.searchables - if len(pipeline_space.fidelities) != 1: + if len(pipeline_space.fidelities) > 1: raise ValueError( "Only one fidelity should be defined in the pipeline space." f"\nGot: {pipeline_space.fidelities}" @@ -196,6 +212,14 @@ def _bracket_optimizer( # noqa: C901, PLR0912, PLR0915 "sample_prior_first should be either True, False or 'highest_fidelity'" ) + if ( + sample_prior_first in (True, "highest_fidelity") or sampler == "prior" + ) and not any(parameter.prior is not None for parameter in parameters.values()): + raise ValueError( + "No priors given to sample from. Consider setting sample_prior_first=False" + " and sampler='uniform'." + ) + from neps.optimizers.utils import brackets # Determine the strategy for creating brackets for sampling @@ -346,7 +370,7 @@ def random_search( pipeline_space: SearchSpace, *, use_priors: bool = False, - ignore_fidelity: bool = True, + ignore_fidelity: bool | Literal["highest fidelity"] = False, ) -> RandomSearch: """A simple random search algorithm that samples configurations uniformly at random. @@ -359,10 +383,52 @@ def random_search( ignore_fidelity: Whether to ignore fidelity when sampling. In this case, the max fidelity is always used. """ - if ignore_fidelity: - parameters = pipeline_space.searchables - else: - parameters = {**pipeline_space.searchables, **pipeline_space.fidelities} + assert ignore_fidelity in ( + True, + False, + "highest fidelity", + ), "ignore_fidelity should be either True, False or 'highest fidelity'" + if not ignore_fidelity and pipeline_space.fidelity is not None: + raise ValueError( + "Fidelities are not supported for RandomSearch. Consider setting the" + " fidelity to a constant value, or setting ignore_fidelity to True to sample" + " from it like any other parameter or 'highest fidelity' to always sample at" + f" max fidelity. Got fidelity: {pipeline_space.fidelities} " + ) + if ignore_fidelity in (True, "highest fidelity") and pipeline_space.fidelity is None: + logger.warning( + "Warning: You are using ignore_fidelity, but no fidelity is defined in the" + " search space. Consider setting ignore_fidelity to False." + ) + match ignore_fidelity: + case True: + parameters = {**pipeline_space.searchables, **pipeline_space.fidelities} + case False: + parameters = {**pipeline_space.searchables} + case "highest fidelity": + parameters = {**pipeline_space.searchables} + + if use_priors and not any( + parameter.prior is not None for parameter in parameters.values() + ): + logger.warning( + "Warning: You are using priors, but no priors are defined in the search" + " space. Consider setting use_priors to False." + ) + + if not use_priors and any( + parameter.prior is not None for parameter in parameters.values() + ): + raise ValueError( + "To use priors, you must set use_priors=True. 
+            "To use priors, you must set use_priors=True. Got priors:"
+            f" {
+                [
+                    parameter
+                    for parameter in parameters.values()
+                    if parameter.prior is not None
+                ]
+            }"
+        )
 
     return RandomSearch(
         space=pipeline_space,
@@ -375,16 +441,34 @@ def random_search(
     )
 
 
-def grid_search(pipeline_space: SearchSpace) -> GridSearch:
+def grid_search(
+    pipeline_space: SearchSpace,
+    *,
+    ignore_fidelity: bool,
+) -> GridSearch:
     """A simple grid search algorithm which discretizes the search space and evaluates
     all possible configurations.
 
     Args:
         pipeline_space: The search space to sample from.
+        ignore_fidelity: Whether to ignore fidelity when sampling.
+            In this case, the max fidelity is always used.
     """
     from neps.optimizers.utils.grid import make_grid
 
-    return GridSearch(configs_list=make_grid(pipeline_space))
+    if any(
+        parameter.prior is not None for parameter in pipeline_space.searchables.values()
+    ):
+        raise ValueError("Grid search does not support priors.")
+    if ignore_fidelity and pipeline_space.fidelity is None:
+        logger.warning(
+            "Warning: You are using ignore_fidelity, but no fidelity is defined in the"
+            " search space. Consider setting ignore_fidelity to False."
+        )
+
+    return GridSearch(
+        configs_list=make_grid(pipeline_space, ignore_fidelity=ignore_fidelity)
+    )
 
 
 def ifbo(
@@ -440,13 +524,36 @@ def ifbo(
     """
     from neps.optimizers.ifbo import _adjust_space_to_match_stepsize
 
+    if pipeline_space.fidelity is None:
+        raise ValueError("Fidelity is required for IFBO.")
+
     # TODO: I'm not sure how this might effect tables, whose lowest fidelity
     # might be below to possibly increased lower bound.
     space, fid_bins = _adjust_space_to_match_stepsize(pipeline_space, step_size)
-    assert space.fidelity is not None
-    fidelity_name, fidelity = space.fidelity
     parameters = space.searchables
 
+    if use_priors and not any(
+        parameter.prior is not None for parameter in parameters.values()
+    ):
+        logger.warning(
+            "Warning: You are using priors, but no priors are defined in the search"
+            " space. Consider setting use_priors to False."
+        )
+
+    if not use_priors and any(
+        parameter.prior is not None for parameter in parameters.values()
+    ):
+        raise ValueError(
+            "To use priors, you must set use_priors=True. Got priors:"
+            f" {
+                [
+                    parameter
+                    for parameter in parameters.values()
+                    if parameter.prior is not None
+                ]
+            }"
+        )
+
     match initial_design_size:
         case "ndim":
             _initial_design_size = len(parameters)
@@ -802,6 +909,11 @@ def priorband(
         `N` * `maximum_fidelity` worth of fidelity has been evaluated, proceed with
         bayesian optimization when sampling a new configuration.
     """
+    if not any(parameter.prior is not None for parameter in space.searchables.values()):
+        logger.warning(
+            "Warning: No priors are defined in the search space, priorband will sample"
+            " uniformly. Consider using hyperband instead."
+        )
     return _bracket_optimizer(
         pipeline_space=space,
         bracket_type=base,
@@ -819,6 +931,7 @@ def bayesian_optimization(
     *,
     initial_design_size: int | Literal["ndim"] = "ndim",
     cost_aware: bool | Literal["log"] = False,
+    ignore_fidelity: bool = False,
     device: torch.device | str | None = None,
 ) -> BayesianOptimization:
     """Models the relation between hyperparameters in your `pipeline_space`
@@ -859,8 +972,36 @@ def bayesian_optimization(
 
             If using `cost`, cost must be provided in the reports of the trials.
 
+        ignore_fidelity: Whether to ignore the fidelity parameter when sampling.
+            In this case, the max fidelity is always used.
         device: Device to use for the optimization.
""" + + if not ignore_fidelity and space.fidelity is not None: + raise ValueError( + "Fidelities are not supported for BayesianOptimization. Consider setting the" + " fidelity to a constant value or ignoring it using ignore_fidelity to" + f" always sample at max fidelity. Got fidelity: {space.fidelities} " + ) + if ignore_fidelity and space.fidelity is None: + logger.warning( + "Warning: You are using ignore_fidelity, but no fidelity is defined in the" + " search space. Consider setting ignore_fidelity to False." + ) + + if any(parameter.prior is not None for parameter in space.searchables.values()): + raise ValueError( + "Bayesian optimization does not support priors. Consider using pibo instead." + " Got priors:" + f" { + [ + parameter + for parameter in space.searchables.values() + if parameter.prior is not None + ] + }" + ) + return _bo( pipeline_space=space, initial_design_size=initial_design_size, @@ -868,6 +1009,7 @@ def bayesian_optimization( device=device, use_priors=False, sample_prior_first=False, + ignore_fidelity=ignore_fidelity, ) @@ -878,6 +1020,7 @@ def pibo( cost_aware: bool | Literal["log"] = False, device: torch.device | str | None = None, sample_prior_first: bool = False, + ignore_fidelity: bool = False, ) -> BayesianOptimization: """A modification of [`bayesian_optimization`][neps.optimizers.algorithms.bayesian_optimization] @@ -899,15 +1042,27 @@ def pibo( cost_aware: Whether to consider reported "cost" from configurations in decision making. If True, the optimizer will weigh potential candidates by how much they cost, incentivising the optimizer to explore cheap, good performing - configurations. This amount is modified over time. If "log", the cost - will be log-transformed before being used. - - !!! warning - - If using `cost`, cost must be provided in the reports of the trials. + configurations. This amount is modified over time. If "log", the cost will be + log-transformed before being used. + !!! warning + If using `cost`, cost must be provided in the reports of the trials. device: Device to use for the optimization. + sample_prior_first: Whether to sample the prior configuration first. + ignore_fidelity: Whether to ignore the fidelity parameter when sampling. + In this case, the max fidelity is always used. """ + if not any(parameter.prior is not None for parameter in space.searchables.values()): + logger.warning( + "Warning: PiBO was called without any priors - using uniform priors on all" + " parameters.\nConsider using Bayesian Optimization instead." + ) + if ignore_fidelity and space.fidelity is None: + logger.warning( + "Warning: You are using ignore_fidelity, but no fidelity is defined in the" + " search space. Consider setting ignore_fidelity to False." + ) + return _bo( pipeline_space=space, initial_design_size=initial_design_size, @@ -915,6 +1070,7 @@ def pibo( device=device, use_priors=True, sample_prior_first=sample_prior_first, + ignore_fidelity=ignore_fidelity, ) diff --git a/neps/optimizers/bayesian_optimization.py b/neps/optimizers/bayesian_optimization.py index ec556803d..c8be0da41 100644 --- a/neps/optimizers/bayesian_optimization.py +++ b/neps/optimizers/bayesian_optimization.py @@ -85,8 +85,14 @@ def __call__( budget_info: BudgetInfo | None = None, n: int | None = None, ) -> SampledConfig | list[SampledConfig]: - assert self.space.fidelity is None, "Fidelity not supported yet." 
-        parameters = self.space.searchables
+        # If fidelities exist, sample from them as normal
+        # This is a bit of a hack, as we set them to max fidelity
+        # afterwards, but we need the complete space to sample
+
+        if self.space.fidelity is not None:
+            parameters = {**self.space.searchables, **self.space.fidelities}
+        else:
+            parameters = {**self.space.searchables}
 
         n_to_sample = 1 if n is None else n
         n_sampled = len(trials)
@@ -117,6 +123,10 @@ def __call__(
             design_samples = design_samples[n_evaluated:]
             for sample in design_samples:
                 sample.update(self.space.constants)
+                if self.space.fidelity is not None:
+                    sample.update(
+                        {key: value.upper for key, value in self.space.fidelities.items()}
+                    )
 
             sampled_configs.extend(
                 [
@@ -193,6 +203,10 @@ def __call__(
             configs = encoder.decode(candidates)
             for config in configs:
                 config.update(self.space.constants)
+                if self.space.fidelity is not None:
+                    config.update(
+                        {key: value.upper for key, value in self.space.fidelities.items()}
+                    )
 
             sampled_configs.extend(
                 [
diff --git a/neps/optimizers/random_search.py b/neps/optimizers/random_search.py
index 5b6742a6a..376baa782 100644
--- a/neps/optimizers/random_search.py
+++ b/neps/optimizers/random_search.py
@@ -33,6 +33,14 @@ def __call__(
         config_dicts = self.encoder.decode(configs)
         for config in config_dicts:
             config.update(self.space.constants)
+            if self.space.fidelity is not None:
+                config.update(
+                    {
+                        key: value.upper
+                        for key, value in self.space.fidelities.items()
+                        if key not in config
+                    }
+                )
 
         if n is None:
             config = config_dicts[0]
diff --git a/neps/optimizers/utils/grid.py b/neps/optimizers/utils/grid.py
index 7b7fbef5b..720dd7713 100644
--- a/neps/optimizers/utils/grid.py
+++ b/neps/optimizers/utils/grid.py
@@ -12,6 +12,7 @@ def make_grid(
     space: SearchSpace,
     *,
     size_per_numerical_hp: int = 10,
+    ignore_fidelity: bool = True,
 ) -> list[dict[str, Any]]:
     """Get a grid of configurations from the search space.
 
@@ -38,7 +39,7 @@ def make_grid(
             case Constant():
                 param_ranges[name] = [hp.value]
             case Integer() | Float():
-                if hp.is_fidelity:
+                if hp.is_fidelity and ignore_fidelity:
                     param_ranges[name] = [hp.upper]
                     continue
 
@@ -53,7 +54,6 @@ def make_grid(
                 param_ranges[name] = uniq_values
             case _:
                 raise NotImplementedError(f"Unknown Parameter type: {type(hp)}\n{hp}")
-
     values = product(*param_ranges.values())
 
     keys = list(space.keys())
diff --git a/tests/test_state/test_neps_state.py b/tests/test_state/test_neps_state.py
index 57b6db946..92c5d2252 100644
--- a/tests/test_state/test_neps_state.py
+++ b/tests/test_state/test_neps_state.py
@@ -84,43 +84,35 @@ def case_search_space_fid_with_prior() -> SearchSpace:
     "multifidelity_tpe",
 ]
 
-OPTIMIZER_FAILS_WITH_FIDELITY = [
+# There's no programmatic way to check if a class requires or
+# doesn't support a fidelity/prior.
+# See issue #118, #119, #120
+# For now, keep these lists up to date manually and xfail the tests
+# that require a fidelity/prior.
+REQUIRES_FIDELITY = [
+    "successive_halving",
+    "asha",
+    "hyperband",
+    "async_hb",
+    "ifbo",
+    "priorband",
+]
+NO_DEFAULT_FIDELITY_SUPPORT = [
     "random_search",
-    "bayesian_optimization_cost_aware",
     "bayesian_optimization",
-    "bayesian_optimization_prior",
     "pibo",
-    "cost_cooling_bayesian_optimization",
-    "cost_cooling",
 ]
-
-# There's no programattic way to check if a class requires a fidelity.
-# See issue #118, #119, #120
-OPTIMIZER_REQUIRES_FIDELITY = [
+NO_DEFAULT_PRIOR_SUPPORT = [
+    "grid_search",
+    "bayesian_optimization",
+    "ifbo",
     "successive_halving",
-    "successive_halving_prior",
     "asha",
-    "asha_prior",
     "hyperband",
-    "hyperband_prior",
     "async_hb",
-    "async_hb_prior",
-    "priorband",
-    "priorband_sh",
-    "priorband_asha",
-    "priorband_async",
-    "priorband_bo",
-    "bayesian_optimization_cost_aware",
-    "mobster",
-    "ifbo",
+    "random_search",
 ]
-REQUIRES_PRIOR = {
-    "priorband",
-    "priorband_bo",
-    "priorband_asha",
-    "priorband_asha_hyperband",
-}
-REQUIRES_COST = ["cost_cooling_bayesian_optimization", "cost_cooling"]
 
 
 @fixture
@@ -132,13 +124,15 @@ def optimizer_and_key_and_search_space(
     if key in JUST_SKIP:
         pytest.xfail(f"{key} is not instantiable")
 
-    if key in REQUIRES_PRIOR and search_space.searchables["a"].prior is None:
-        pytest.xfail(f"{key} requires a prior")
+    if key in NO_DEFAULT_PRIOR_SUPPORT and any(
+        parameter.prior is not None for parameter in search_space.searchables.values()
+    ):
+        pytest.xfail(f"{key} crashed with a prior")
 
-    if len(search_space.fidelities) > 0 and key in OPTIMIZER_FAILS_WITH_FIDELITY:
+    if search_space.fidelity is not None and key in NO_DEFAULT_FIDELITY_SUPPORT:
         pytest.xfail(f"{key} crashed with a fidelity")
 
-    if key in OPTIMIZER_REQUIRES_FIDELITY and not len(search_space.fidelities) > 0:
+    if key in REQUIRES_FIDELITY and search_space.fidelity is None:
         pytest.xfail(f"{key} requires a fidelity parameter")
 
     kwargs: dict[str, Any] = {}
@@ -171,10 +165,9 @@ def case_neps_state_filebased(
 def test_sample_trial(
     neps_state: NePSState,
     optimizer_and_key_and_search_space: tuple[AskFunction, str, SearchSpace],
+    capsys,
 ) -> None:
     optimizer, key, search_space = optimizer_and_key_and_search_space
-    if key in REQUIRES_COST and neps_state.lock_and_get_optimizer_state().budget is None:
-        pytest.xfail(f"{key} requires a cost budget")
 
     assert neps_state.lock_and_read_trials() == {}
     assert neps_state.lock_and_get_next_pending_trial() is None

From 2126bde7785b78710e111d6db470b058a5430b84 Mon Sep 17 00:00:00 2001
From: Meganton
Date: Mon, 28 Apr 2025 17:41:16 +0200
Subject: [PATCH 2/6] fix: set default value for ignore_fidelity in grid_search function

---
 neps/optimizers/algorithms.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/neps/optimizers/algorithms.py b/neps/optimizers/algorithms.py
index b94af85ac..25112fd85 100644
--- a/neps/optimizers/algorithms.py
+++ b/neps/optimizers/algorithms.py
@@ -443,8 +443,7 @@ def random_search(
 
 def grid_search(
     pipeline_space: SearchSpace,
-    *,
-    ignore_fidelity: bool,
+    ignore_fidelity: bool = False,
 ) -> GridSearch:
     """A simple grid search algorithm which discretizes the search space and evaluates
     all possible configurations.

From beee4cc0bcfecdba7248925a6cb200dee5f62646 Mon Sep 17 00:00:00 2001
From: Meganton
Date: Mon, 28 Apr 2025 17:43:09 +0200
Subject: [PATCH 3/6] fix: simplify error message formatting in random_search function

---
 neps/optimizers/algorithms.py | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

diff --git a/neps/optimizers/algorithms.py b/neps/optimizers/algorithms.py
index 25112fd85..50742e7be 100644
--- a/neps/optimizers/algorithms.py
+++ b/neps/optimizers/algorithms.py
@@ -421,14 +421,8 @@ def random_search(
     ):
         raise ValueError(
             "To use priors, you must set use_priors=True. Got priors:"
-            f" {
-                [
-                    parameter
-                    for parameter in parameters.values()
-                    if parameter.prior is not None
-                ]
-            }"
-        )
+            f" {[parameter for parameter in parameters.values()
+            if parameter.prior is not None]}")
 
     return RandomSearch(
         space=pipeline_space,

From 58794ef58d2a197670efdfe46cd6b557c83e56c Mon Sep 17 00:00:00 2001
From: Meganton
Date: Mon, 28 Apr 2025 17:52:02 +0200
Subject: [PATCH 4/6] fix: improve error messages for prior usage in optimization functions

---
 neps/optimizers/algorithms.py | 38 +++++++++++++----------------------
 1 file changed, 14 insertions(+), 24 deletions(-)

diff --git a/neps/optimizers/algorithms.py b/neps/optimizers/algorithms.py
index 25112fd85..34d0f74e6 100644
--- a/neps/optimizers/algorithms.py
+++ b/neps/optimizers/algorithms.py
@@ -419,15 +419,11 @@ def random_search(
     if not use_priors and any(
         parameter.prior is not None for parameter in parameters.values()
     ):
+        priors = [
+            parameter for parameter in parameters.values() if parameter.prior is not None
+        ]
         raise ValueError(
-            "To use priors, you must set use_priors=True. Got priors:"
-            f" {
-                [
-                    parameter
-                    for parameter in parameters.values()
-                    if parameter.prior is not None
-                ]
-            }"
+            f"To use priors, you must set use_priors=True. Got priors: {priors}"
         )
 
     return RandomSearch(
@@ -542,15 +538,11 @@ def ifbo(
     if not use_priors and any(
         parameter.prior is not None for parameter in parameters.values()
     ):
+        priors = [
+            parameter for parameter in parameters.values() if parameter.prior is not None
+        ]
         raise ValueError(
-            "To use priors, you must set use_priors=True. Got priors:"
-            f" {
-                [
-                    parameter
-                    for parameter in parameters.values()
-                    if parameter.prior is not None
-                ]
-            }"
+            f"To use priors, you must set use_priors=True. Got priors: {priors}"
         )
 
     match initial_design_size:
@@ -989,16 +981,14 @@ def bayesian_optimization(
     )
 
     if any(parameter.prior is not None for parameter in space.searchables.values()):
+        priors = [
+            parameter
+            for parameter in space.searchables.values()
+            if parameter.prior is not None
+        ]
         raise ValueError(
             "Bayesian optimization does not support priors. Consider using pibo instead."
-            " Got priors:"
-            f" {
-                [
-                    parameter
-                    for parameter in space.searchables.values()
-                    if parameter.prior is not None
-                ]
-            }"
+            f" Got priors: {priors}"
         )
 
     return _bo(

From bd0e77b7ac6b111534e81bdcd1e4fff26976eb40 Mon Sep 17 00:00:00 2001
From: Meganton
Date: Fri, 2 May 2025 12:39:51 +0200
Subject: [PATCH 5/6] fix: clarify prior parameter requirements and error handling in documentation

---
 docs/reference/pipeline_space.md | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/docs/reference/pipeline_space.md b/docs/reference/pipeline_space.md
index ee0955ed4..233b22866 100644
--- a/docs/reference/pipeline_space.md
+++ b/docs/reference/pipeline_space.md
@@ -77,15 +77,10 @@ neps.run(
     }
 )
 ```
-!!! warning "Must set `prior=` for all parameters, if any"
-
-    If you specify `prior=` for one parameter, you must do so for all your variables.
-    This will be improved in future versions.
 
 !!! warning "Interaction with `is_fidelity`"
 
-    If you specify `is_fidelity=True` for one parameter, the `prior=` and `prior_confidence=` are ignored.
-    This will be dissallowed in future versions.
+    If you specify `is_fidelity=True` and `prior=` for one parameter, this will raise an error.
 
 ## Defining a pipeline space using YAML
 Create a YAML file (e.g., `./pipeline_space.yaml`) with the parameter definitions following this structure.
From 151e0c046353e36001f0ced63e631e0230bc0e93 Mon Sep 17 00:00:00 2001
From: Meganton
Date: Mon, 12 May 2025 19:51:53 +0200
Subject: [PATCH 6/6] fix: add noqa comments to suppress linting warnings in algorithms and bayesian optimization modules

---
 neps/optimizers/algorithms.py            | 2 +-
 neps/optimizers/bayesian_optimization.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/neps/optimizers/algorithms.py b/neps/optimizers/algorithms.py
index 34d0f74e6..0794a69d3 100644
--- a/neps/optimizers/algorithms.py
+++ b/neps/optimizers/algorithms.py
@@ -439,7 +439,7 @@ def random_search(
 
 def grid_search(
     pipeline_space: SearchSpace,
-    ignore_fidelity: bool = False,
+    ignore_fidelity: bool = False,  # noqa: FBT001, FBT002
 ) -> GridSearch:
     """A simple grid search algorithm which discretizes the search space and evaluates
     all possible configurations.
diff --git a/neps/optimizers/bayesian_optimization.py b/neps/optimizers/bayesian_optimization.py
index c8be0da41..f29de0a60 100644
--- a/neps/optimizers/bayesian_optimization.py
+++ b/neps/optimizers/bayesian_optimization.py
@@ -79,7 +79,7 @@ class BayesianOptimization:
     device: torch.device | None
     """The device to use for the optimization."""
 
-    def __call__(
+    def __call__(  # noqa: C901, PLR0912
         self,
         trials: Mapping[str, Trial],
         budget_info: BudgetInfo | None = None,
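
Reviewer note (not part of any patch): the sketch below shows how the `ignore_fidelity` behaviour added by this series could be exercised locally. Only the optimizer entry points and their `ignore_fidelity` arguments are taken from the diff above; the `SearchSpace`, `Float`, and `Integer` constructors and import paths are assumptions about the surrounding NePS API.

    # Illustrative usage sketch, under the assumptions stated above.
    from neps.optimizers import algorithms
    from neps.space import Float, Integer, SearchSpace

    # Hypothetical space with one ordinary parameter and one fidelity parameter.
    space = SearchSpace(
        {
            "lr": Float(1e-4, 1e-1, log=True),
            "epochs": Integer(1, 100, is_fidelity=True),
        }
    )

    # Before this series, bayesian_optimization raised on any space containing a
    # fidelity. With ignore_fidelity=True it samples the remaining parameters and
    # pins the fidelity to its maximum when decoding configurations.
    bo = algorithms.bayesian_optimization(space, ignore_fidelity=True)

    # random_search now distinguishes True (sample the fidelity like any other
    # parameter) from "highest fidelity" (always evaluate at the maximum).
    rs = algorithms.random_search(space, ignore_fidelity="highest fidelity")

With a space that defines priors, the same series makes the failure modes explicit: bayesian_optimization rejects priors and points to pibo, and random_search raises unless use_priors=True is passed.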