feat: Re-introduce graphs #193


Open. Wants to merge 53 commits into base: master.
53 commits:
6794a05  Add BO over graphs (vladislavalerievich, Jan 24, 2025)
e94bbe8  Return the best graph from optimize_acqf_graph function (vladislavalerievich, Jan 24, 2025)
1587778  Refactor method that uses lru_cache into a standalone function (vladislavalerievich, Jan 24, 2025)
10221a9  Remove examples (vladislavalerievich, Jan 24, 2025)
10247b2  fix: ruff format (vladislavalerievich, Jan 27, 2025)
bd64fdc  fix: add grakel to dev dependencies (vladislavalerievich, Jan 27, 2025)
01d52ae  yo (eddiebergman, Feb 7, 2025)
92a7b26  chore: perf testing (Feb 7, 2025)
0155e7e  optimizations on parsing and test (eddiebergman, Feb 9, 2025)
2d30ed7  fix weird numerics and new opt (eddiebergman, Feb 9, 2025)
d01fe9a  select (eddiebergman, Feb 10, 2025)
06b5d03  Test selection (eddiebergman, Feb 10, 2025)
91c6910  Rework mutations (eddiebergman, Feb 10, 2025)
149cc8c  Fix parsing (eddiebergman, Feb 10, 2025)
17b11a3  Fix mutation (eddiebergman, Feb 10, 2025)
0226334  fix: stop unpacking of nn.Sequential (timurcarstensen, Feb 20, 2025)
241d9da  tests: add mlp end-to-end test (timurcarstensen, Feb 20, 2025)
8f186fb  yo (eddiebergman, Feb 7, 2025)
670183a  chore: perf testing (Feb 7, 2025)
6a1ac28  optimizations on parsing and test (eddiebergman, Feb 9, 2025)
38460a1  fix weird numerics and new opt (eddiebergman, Feb 9, 2025)
5e54588  select (eddiebergman, Feb 10, 2025)
3e272b4  Test selection (eddiebergman, Feb 10, 2025)
198d116  Rework mutations (eddiebergman, Feb 10, 2025)
383e5dd  Fix parsing (eddiebergman, Feb 10, 2025)
eb46f96  Fix mutation (eddiebergman, Feb 10, 2025)
39c62e9  fix: stop unpacking of nn.Sequential (timurcarstensen, Feb 20, 2025)
d1dc291  chore: move tests (timurcarstensen, Feb 20, 2025)
ea8281e  yo (eddiebergman, Feb 7, 2025)
c8dd67a  chore: perf testing (Feb 7, 2025)
ba82313  optimizations on parsing and test (eddiebergman, Feb 9, 2025)
9d04f3d  fix weird numerics and new opt (eddiebergman, Feb 9, 2025)
b4652dd  select (eddiebergman, Feb 10, 2025)
af7aa5c  Test selection (eddiebergman, Feb 10, 2025)
38757a4  Rework mutations (eddiebergman, Feb 10, 2025)
39fa2af  Fix parsing (eddiebergman, Feb 10, 2025)
5301cd4  Fix mutation (eddiebergman, Feb 10, 2025)
537035b  fix: stop unpacking of nn.Sequential (timurcarstensen, Feb 20, 2025)
d152452  tests: add mlp end-to-end test (timurcarstensen, Feb 20, 2025)
b75dac0  chore: move tests (timurcarstensen, Feb 20, 2025)
fee035a  tests: mutate test (timurcarstensen, Feb 20, 2025)
ba4544a  Merge branch 'make-graphs-great-once-more' of github.com:automl/neps … (timurcarstensen, Feb 20, 2025)
3402059  Merge branch 'make-graphs-great-once-more' of github.com:automl/neps … (eddiebergman, Feb 20, 2025)
0a76b9b  feat(grammar): integrate grammar into search space (eddiebergman, Feb 20, 2025)
c69d816  style: cleanup root files (eddiebergman, Feb 20, 2025)
4479b00  fix(grammar): to_model handles deep nested passthroughs (eddiebergman, Feb 20, 2025)
404290e  Merge branch 'feat-graphs' into make-graphs-great-once-more (eddiebergman, Feb 20, 2025)
e96e1c4  style: typing fixes (eddiebergman, Feb 20, 2025)
c57398f  fix: raise explicit grammar not supported for optimizers (eddiebergman, Feb 20, 2025)
7f5165d  fix: graph tests (timurcarstensen, Feb 20, 2025)
98911f5  chore: deleting unused cli (timurcarstensen, Feb 20, 2025)
4c4ab01  fix: add comment to tests (timurcarstensen, Feb 20, 2025)
4ba9ab3  tmp (eddiebergman, Feb 21, 2025)
47 changes: 47 additions & 0 deletions graph_playground.py
@@ -0,0 +1,47 @@
from __future__ import annotations

from dataclasses import dataclass

from graph import Grammar, mutations, parse, select, to_string


# Leafs
@dataclass
class T:
    s: str

    # This is the `op()`
    def __call__(self) -> str:
        return self.s


def join(*s: str) -> str:
    return "[" + "".join(s) + "]"


grammar_1 = Grammar.from_dict(
    {
        "s": (["a", "b", "p a", "p p"], join),
        "p": ["a b", "s"],
        "a": T("a"),
        "b": T("b"),
    }
)

root = parse(grammar_1, "s(p(s(a), a))")

selections = list(select(root, how=("climb", range(1, 3))))
mutants = mutations(
    root=root,
    grammar=grammar_1,
    which=selections,
    max_mutation_depth=3,
)
mutants = list(mutants)

import rich

rich.print("grammar", grammar_1)
rich.print("root", f"{to_string(root)}")
rich.print("selections", [to_string(s) for s in selections])
rich.print("mutants", [to_string(m) for m in mutants])
3 changes: 2 additions & 1 deletion neps/__init__.py
@@ -4,7 +4,7 @@
 from neps.optimizers.optimizer import SampledConfig
 from neps.plot.plot import plot
 from neps.plot.tensorboard_eval import tblogger
-from neps.space import Categorical, Constant, Float, Integer, SearchSpace
+from neps.space import Categorical, Constant, Float, Grammar, Integer, SearchSpace
 from neps.state import BudgetInfo, Trial
 from neps.status.status import status
 from neps.utils.files import load_and_merge_yamls as load_yamls
@@ -15,6 +15,7 @@
     "Categorical",
     "Constant",
     "Float",
+    "Grammar",
     "Integer",
     "SampledConfig",
     "SearchSpace",
4 changes: 2 additions & 2 deletions neps/api.py
@@ -18,7 +18,7 @@
 from ConfigSpace import ConfigurationSpace

 from neps.optimizers.algorithms import CustomOptimizer
-from neps.space import Parameter, SearchSpace
+from neps.space import Constant, Grammar, Parameter, SearchSpace
 from neps.state import EvaluatePipelineReturn

 logger = logging.getLogger(__name__)
@@ -27,7 +27,7 @@
 def run(  # noqa: PLR0913
     evaluate_pipeline: Callable[..., EvaluatePipelineReturn] | str,
     pipeline_space: (
-        Mapping[str, dict | str | int | float | Parameter]
+        Mapping[str, dict | str | int | float | Parameter | Constant | Grammar]
         | SearchSpace
         | ConfigurationSpace
     ),
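Since `Grammar` is now an accepted `pipeline_space` value, a call to `neps.run` could look roughly like the sketch below. This is a minimal sketch only: it assumes `neps.Grammar` exposes the same `from_dict` constructor used by `graph_playground.py` above, and the key names, productions, and the way `evaluate_pipeline` receives the sampled derivation (`arch` here) are illustrative, not confirmed by this diff.

from dataclasses import dataclass

import neps


@dataclass
class T:  # terminal symbol, mirroring graph_playground.py
    s: str

    def __call__(self) -> str:
        return self.s


def join(*s: str) -> str:
    return "[" + "".join(s) + "]"


def evaluate_pipeline(lr: float, arch) -> float:
    # A real pipeline would build and train a model from `arch`;
    # a constant stands in for the objective here.
    return 0.0


neps.run(
    evaluate_pipeline=evaluate_pipeline,
    pipeline_space={
        "lr": neps.Float(lower=1e-4, upper=1e-1, log=True),
        "arch": neps.Grammar.from_dict(  # assumed constructor, as in the playground
            {"s": (["a", "b"], join), "a": T("a"), "b": T("b")}
        ),
    },
    root_directory="results/grammar_sketch",
    max_evaluations_total=5,
)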
27 changes: 15 additions & 12 deletions neps/optimizers/algorithms.py
@@ -82,7 +82,7 @@ def _bo(
             f" Got: {pipeline_space.fidelities}"
         )

-    parameters = pipeline_space.searchables
+    parameters = {**pipeline_space.numerical, **pipeline_space.categoricals}

     match initial_design_size:
         case "ndim":
@@ -126,9 +126,6 @@ def _bracket_optimizer(  # noqa: C901, PLR0912, PLR0915
     sampler: Literal["uniform", "prior", "priorband"] | PriorBandSampler | Sampler,
     bayesian_optimization_kick_in_point: int | float | None,
     sample_prior_first: bool | Literal["highest_fidelity"],
-    # NOTE: This is the only argument to get a default, since it
-    # is not required for hyperband style algorithms, only single bracket
-    # style ones.
     early_stopping_rate: int | None,
     device: torch.device | None,
 ) -> BracketOptimizer:
@@ -183,7 +180,7 @@ def _bracket_optimizer(  # noqa: C901, PLR0912, PLR0915
     """
     assert pipeline_space.fidelity is not None
     fidelity_name, fidelity = pipeline_space.fidelity
-    parameters = pipeline_space.searchables
+    parameters = {**pipeline_space.numerical, **pipeline_space.categoricals}

     if len(pipeline_space.fidelities) != 1:
         raise ValueError(
@@ -324,9 +321,8 @@ def _bracket_optimizer(  # noqa: C901, PLR0912, PLR0915


 def determine_optimizer_automatically(space: SearchSpace) -> str:
-    has_prior = any(
-        parameter.prior is not None for parameter in space.searchables.values()
-    )
+    parameters = {**space.numerical, **space.categoricals}
+    has_prior = any(parameter.prior is not None for parameter in parameters.values())
     has_fidelity = len(space.fidelities) > 0

     match (has_prior, has_fidelity):
@@ -360,14 +356,18 @@ def random_search(
         In this case, the max fidelity is always used.
     """
     if ignore_fidelity:
-        parameters = pipeline_space.searchables
+        parameters = {**pipeline_space.numerical, **pipeline_space.categoricals}
     else:
-        parameters = {**pipeline_space.searchables, **pipeline_space.fidelities}
+        parameters = {
+            **pipeline_space.numerical,
+            **pipeline_space.categoricals,
+            **pipeline_space.fidelities,
+        }

     return RandomSearch(
         space=pipeline_space,
         encoder=ConfigEncoder.from_parameters(parameters),
-        sampler=(
+        numerical_sampler=(
             Prior.from_parameters(parameters)
             if use_priors
             else Uniform(ndim=len(parameters))
@@ -384,6 +384,9 @@ def grid_search(pipeline_space: SearchSpace) -> GridSearch:
     """
     from neps.optimizers.utils.grid import make_grid

+    if pipeline_space.grammar is not None:
+        raise NotImplementedError("Grammars not supported for `grid_search` yet.")
+
     return GridSearch(configs_list=make_grid(pipeline_space))

@@ -445,7 +448,7 @@ def ifbo(
     space, fid_bins = _adjust_space_to_match_stepsize(pipeline_space, step_size)
     assert space.fidelity is not None
     fidelity_name, fidelity = space.fidelity
-    parameters = space.searchables
+    parameters = {**pipeline_space.numerical, **pipeline_space.categoricals}

     match initial_design_size:
         case "ndim":
6 changes: 5 additions & 1 deletion neps/optimizers/bayesian_optimization.py
@@ -86,7 +86,11 @@ def __call__(
         n: int | None = None,
     ) -> SampledConfig | list[SampledConfig]:
         assert self.space.fidelity is None, "Fidelity not supported yet."
-        parameters = self.space.searchables
+        parameters = {
+            **self.space.numerical,
+            **self.space.categoricals,
+            **self.space.grammars,
+        }

         n_to_sample = 1 if n is None else n
         n_sampled = len(trials)
8 changes: 7 additions & 1 deletion neps/optimizers/bracket_optimizer.py
@@ -249,6 +249,12 @@ class BracketOptimizer:
     fid_name: str
     """The name of the fidelity in the space."""

+    def __post_init__(self) -> None:
+        if self.space.grammar is not None:
+            raise NotImplementedError(
+                "Grammars not supported for `BracketOptimizer` yet."
+            )
+
     def __call__(  # noqa: C901, PLR0912
         self,
         trials: Mapping[str, Trial],
@@ -257,7 +263,7 @@ def __call__(  # noqa: C901, PLR0912
     ) -> SampledConfig | list[SampledConfig]:
         assert n is None, "TODO"
         space = self.space
-        parameters = space.searchables
+        parameters = {**self.space.numerical, **self.space.categoricals}

         # If we have no trials, we either go with the prior or just a sampled config
         if len(trials) == 0:
6 changes: 5 additions & 1 deletion neps/optimizers/ifbo.py
@@ -129,6 +129,10 @@ class IFBO:
     Each one will be treated as an individual fidelity level.
     """

+    def __post_init__(self) -> None:
+        if self.space.grammar is not None:
+            raise NotImplementedError("Grammars not supported for `IFBO` yet.")
+
     def __call__(
         self,
         trials: Mapping[str, Trial],
@@ -137,7 +141,7 @@ def __call__(
     ) -> SampledConfig | list[SampledConfig]:
         assert self.space.fidelity is not None
         fidelity_name, fidelity = self.space.fidelity
-        parameters = self.space.searchables
+        parameters = {**self.space.numerical, **self.space.categoricals}

         assert n is None, "TODO"
         ids = [int(config_id.split("_", maxsplit=1)[0]) for config_id in trials]
File renamed without changes.
65 changes: 65 additions & 0 deletions neps/optimizers/models/graphs/context_managers.py
@@ -0,0 +1,65 @@
from __future__ import annotations

from collections.abc import Iterator
from contextlib import contextmanager
from typing import TYPE_CHECKING

from botorch.models import SingleTaskGP

from neps.optimizers.models.graphs.kernels import BoTorchWLKernel, compute_kernel

if TYPE_CHECKING:
    import networkx as nx
    from botorch.models.gp_regression_mixed import Kernel


@contextmanager
def set_graph_lookup(
    kernel_or_gp: Kernel | SingleTaskGP,
    new_graphs: list[nx.Graph],
    *,
    append: bool = True,
) -> Iterator[None]:
    """Context manager to temporarily set the graph lookup for a kernel or GP model.

    Args:
        kernel_or_gp (Kernel | SingleTaskGP): The kernel or GP model whose graph
            lookup is to be set.
        new_graphs (list[nx.Graph]): The new graphs to set in the graph lookup.
        append (bool, optional): Whether to append the new graphs to the existing
            graph lookup. Defaults to True.
    """
    kernel_prev_graphs: list[tuple[Kernel, list[nx.Graph]]] = []

    # Determine the modules to update based on the input type
    if isinstance(kernel_or_gp, SingleTaskGP):
        modules = [
            k
            for k in kernel_or_gp.covar_module.sub_kernels()
            if isinstance(k, BoTorchWLKernel)
        ]
    elif isinstance(kernel_or_gp, BoTorchWLKernel):
        modules = [kernel_or_gp]
    else:
        assert hasattr(kernel_or_gp, "sub_kernels"), (
            "Kernel module must have sub_kernels method."
        )
        modules = [
            k for k in kernel_or_gp.sub_kernels() if isinstance(k, BoTorchWLKernel)
        ]

    # Save the current graph lookup and set the new graph lookup
    for kern in modules:
        compute_kernel.cache_clear()

        kernel_prev_graphs.append((kern, kern.graph_lookup))
        if append:
            kern.set_graph_lookup([*kern.graph_lookup, *new_graphs])
        else:
            kern.set_graph_lookup(new_graphs)

    yield

    # Restore the original graph lookup after the context manager exits
    for kern, prev_graphs in kernel_prev_graphs:
        kern.set_graph_lookup(prev_graphs)
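For orientation, a hypothetical usage sketch of this context manager follows; the fitted GP, the candidate graphs, and the acquisition call are illustrative assumptions, not part of this diff. The point is the lifecycle: candidate graphs are visible to the WL kernel only inside the `with` block, and the original lookup is restored on exit.

from neps.optimizers.models.graphs.context_managers import set_graph_lookup

# `gp` is assumed to be a fitted SingleTaskGP whose covariance module contains
# a BoTorchWLKernel; `candidate_graphs` a list of networkx graphs; `acq` an
# acquisition function over the numerical part of the configs. All hypothetical.
with set_graph_lookup(gp, candidate_graphs, append=True):
    # Inside the context the kernel's lookup covers train + candidate graphs,
    # so candidate configurations can be scored against the training data.
    scores = acq(X_candidates)
# On exit the kernel's graph lookup reverts to the training graphs.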