24 changes: 24 additions & 0 deletions src/hyperactive/base/_optimizer.py
@@ -18,6 +18,13 @@ class BaseOptimizer(BaseObject):
"info:compute": "middle", # "low", "middle", "high"
# see here for explanation of the tags:
# https://simonblanke.github.io/gradient-free-optimizers-documentation/1.5/optimizers/ # noqa: E501
# search space capabilities (conservative defaults)
"capability:discrete": True, # supports discrete lists
"capability:continuous": False, # supports continuous ranges
"capability:categorical": True, # supports categorical choices
"capability:log_scale": False, # supports log-scale sampling
"capability:conditions": False, # supports conditional params
"capability:constraints": False, # supports constraint functions
}

def __init__(self):
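For context, a minimal sketch of how a backend adapter would override these conservative defaults (the class name is hypothetical, not part of this diff):

```python
# Hypothetical subclass: a backend that natively samples continuous and
# log-scale ranges would advertise that via the capability tags.
class _MyBackendAdapter(BaseOptimizer):
    _tags = {
        "capability:continuous": True,  # accepts (low, high) tuples directly
        "capability:log_scale": True,   # supports log-uniform sampling
        # the remaining capability tags keep the conservative defaults above
    }
```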
@@ -76,10 +83,27 @@ def solve(self):
The dict ``best_params`` can be used in ``experiment.score`` or
``experiment.evaluate`` directly.
"""
from hyperactive.opt._adapters._adapter_utils import adapt_search_space

experiment = self.get_experiment()
search_config = self.get_search_config()

# Adapt search space for backend capabilities (e.g., categorical encoding)
capabilities = {
"categorical": self.get_tag("capability:categorical"),
"continuous": self.get_tag("capability:continuous"),
}
experiment, search_config, adapter = adapt_search_space(
experiment, search_config, capabilities
)

# Run optimization
best_params = self._solve(experiment, **search_config)

# Decode results if adapter was used
if adapter is not None:
best_params = adapter.decode(best_params)

self.best_params_ = best_params
return best_params
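To illustrate the round trip this wiring enables — a hedged sketch, with the optimizer class and experiment as hypothetical stand-ins:

```python
# Hypothetical usage: a backend with capability:categorical=False receives
# integer codes in _solve(); adapter.decode() restores the original values.
opt = SomeGFOBackedOptimizer(  # hypothetical subclass of BaseOptimizer
    search_space={"kernel": ["linear", "rbf", "poly"], "C": [0.1, 1.0, 10.0]},
    experiment=experiment,     # any BaseExperiment instance
)
best = opt.solve()
# _solve() saw {"kernel": [0, 1, 2], ...}; best["kernel"] is again e.g. "rbf".
```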

82 changes: 82 additions & 0 deletions src/hyperactive/opt/_adapters/_adapter_utils.py
@@ -0,0 +1,82 @@
"""Utility functions for search space adaptation."""
# copyright: hyperactive developers, MIT License (see LICENSE file)

from ._search_space_adapter import SearchSpaceAdapter

__all__ = ["adapt_search_space", "detect_search_space_key"]


def detect_search_space_key(search_config):
"""Find which key holds the search space in the config.

Parameters
----------
search_config : dict
The search configuration dictionary.

Returns
-------
str or None
The key name for search space, or None if not found.
"""
for key in ["search_space", "param_space", "param_grid", "param_distributions"]:
if key in search_config and search_config[key] is not None:
return key
return None
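A few illustrative calls, assuming the module path from this diff:

```python
from hyperactive.opt._adapters._adapter_utils import detect_search_space_key

detect_search_space_key({"param_grid": {"C": [1, 10]}})        # -> "param_grid"
detect_search_space_key({"search_space": None, "n_iter": 50})  # -> None (value is None)
detect_search_space_key({"n_trials": 100})                     # -> None (no known key)
```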


def adapt_search_space(experiment, search_config, capabilities):
"""Adapt search space and experiment for backend capabilities.

If the backend doesn't support certain search space features
(e.g., categorical values, continuous ranges), this function:
- Validates the search space format
- Encodes categorical dimensions (strings to integers)
- Discretizes continuous dimensions (tuples to lists)
- Wraps the experiment to decode parameters during scoring

Parameters
----------
experiment : BaseExperiment
The experiment to optimize.
search_config : dict
The search configuration containing the search space.
capabilities : dict
Backend capabilities, e.g., {"categorical": True, "continuous": False}.

Returns
-------
experiment : BaseExperiment
The experiment, possibly wrapped for decoding.
search_config : dict
The search config, possibly with encoded/discretized search space.
adapter : SearchSpaceAdapter or None
The adapter if adaptation was applied, None otherwise.

Raises
------
ValueError, TypeError
If the search space format is invalid.
"""
search_space_key = detect_search_space_key(search_config)

# No search space found - pass through unchanged
if not search_space_key or not search_config.get(search_space_key):
return experiment, search_config, None

# Create adapter with backend capabilities
adapter = SearchSpaceAdapter(search_config[search_space_key], capabilities)

# Validate search space format
adapter.validate()

# Backend supports all features - pass through unchanged
if not adapter.needs_adaptation:
return experiment, search_config, None

# Adaptation needed - transform search space and wrap experiment
encoded_config = search_config.copy()
encoded_config[search_space_key] = adapter.encode()
wrapped_experiment = adapter.wrap_experiment(experiment)

return wrapped_experiment, encoded_config, adapter
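For orientation, a sketch of the two return shapes, with `experiment` standing in for any BaseExperiment instance and the exact encoding left to SearchSpaceAdapter:

```python
from hyperactive.opt._adapters._adapter_utils import adapt_search_space

config = {"search_space": {"kernel": ["linear", "rbf"]}, "n_iter": 20}
caps = {"categorical": False, "continuous": True}

exp2, config2, adapter = adapt_search_space(experiment, config, caps)
if adapter is None:
    # backend supports everything (or no search space found): pass-through
    assert config2 is config
else:
    # config2["search_space"] holds encoded values (e.g., integer codes);
    # exp2 decodes them before scoring, and adapter.decode(best_params)
    # restores the original values after optimization.
    pass
```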
100 changes: 92 additions & 8 deletions src/hyperactive/opt/_adapters/_base_optuna_adapter.py
@@ -12,10 +12,16 @@ class _BaseOptunaAdapter(BaseOptimizer):
_tags = {
"python_dependencies": ["optuna"],
"info:name": "Optuna-based optimizer",
# Search space capabilities
"capability:discrete": True,
"capability:continuous": True,
"capability:categorical": True,
"capability:log_scale": True,
}

def __init__(
self,
unified_space=None,
param_space=None,
n_trials=100,
initialize=None,
@@ -25,6 +31,7 @@ def __init__(
experiment=None,
**optimizer_kwargs,
):
self.unified_space = unified_space
self.param_space = param_space
self.n_trials = n_trials
self.initialize = initialize
@@ -35,6 +42,34 @@ def __init__(
self.optimizer_kwargs = optimizer_kwargs
super().__init__()

def get_search_config(self):
"""Get the search configuration.

Returns
-------
dict with str keys
The search configuration dictionary.
"""
search_config = super().get_search_config()

# Resolve: unified_space is converted to param_space
unified_space = search_config.pop("unified_space", None)
param_space = search_config.get("param_space")

# Validate: only one should be set
if unified_space is not None and param_space is not None:
raise ValueError(
"Provide either 'unified_space' or 'param_space', not both. "
"Use 'unified_space' for simple dict[str, list] format, "
"or 'param_space' for native Optuna format with ranges/distributions."
)

# Use unified_space if param_space is not set
if unified_space is not None:
search_config["param_space"] = unified_space

return search_config
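A short sketch of the resolution rule; the adapter class here is a hypothetical `_BaseOptunaAdapter` subclass, while `optuna.distributions.FloatDistribution` is real Optuna API:

```python
import optuna

# unified_space: backend-agnostic format shared with the GFO adapters
opt = SomeOptunaOptimizer(
    unified_space={"C": (1e-3, 1e3, "log"), "kernel": ["linear", "rbf"]},
    experiment=experiment,
)

# param_space: native Optuna format, may hold distribution objects
opt = SomeOptunaOptimizer(
    param_space={"C": optuna.distributions.FloatDistribution(1e-3, 1e3, log=True)},
    experiment=experiment,
)

# Passing both raises ValueError when get_search_config() resolves them.
```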

def _get_optimizer(self):
"""Get the Optuna optimizer to use.

@@ -82,20 +117,66 @@ def _suggest_params(self, trial, param_space):
for key, space in param_space.items():
if hasattr(space, "suggest"): # optuna distribution object
params[key] = trial._suggest(space, key)
- elif isinstance(space, tuple) and len(space) == 2:
- # Tuples are treated as ranges (low, high)
- low, high = space
- if isinstance(low, int) and isinstance(high, int):
- params[key] = trial.suggest_int(key, low, high)
- else:
- params[key] = trial.suggest_float(key, low, high, log=False)
+ elif isinstance(space, tuple):
+ # Tuples are continuous ranges in unified format
+ params[key] = self._suggest_continuous(trial, key, space)
elif isinstance(space, list):
# Lists are treated as categorical choices
params[key] = trial.suggest_categorical(key, space)
else:
raise ValueError(f"Invalid parameter space for key '{key}': {space}")
return params

def _suggest_continuous(self, trial, key, space):
"""Suggest a continuous parameter from a tuple specification.

Handles unified tuple formats:
- (low, high) - linear scale
- (low, high, "log") - log scale
- (low, high, n_points) - linear scale (n_points ignored for Optuna)
- (low, high, n_points, "log") - log scale (n_points ignored for Optuna)

Parameters
----------
trial : optuna.Trial
The Optuna trial object
key : str
The parameter name
space : tuple
The continuous range specification

Returns
-------
float or int
The suggested value
"""
if len(space) < 2:
raise ValueError(
f"Parameter '{key}': continuous range needs at least 2 values "
f"(low, high), got {len(space)}."
)

low, high = space[0], space[1]
log_scale = False

# Parse optional arguments
if len(space) == 3:
third = space[2]
if isinstance(third, str) and third.lower() == "log":
log_scale = True
# If third is int/float, it's n_points - ignore for Optuna
elif len(space) == 4:
# (low, high, n_points, "log")
fourth = space[3]
if isinstance(fourth, str) and fourth.lower() == "log":
log_scale = True

# Suggest based on type
if isinstance(low, int) and isinstance(high, int):
return trial.suggest_int(key, low, high, log=log_scale)
else:
return trial.suggest_float(key, low, high, log=log_scale)
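To make the accepted shapes concrete, the dispatch for a few illustrative inputs:

```python
# (key, space tuple)           -> resulting Optuna call
# ("lr",  (1e-4, 1e-1))        -> trial.suggest_float("lr", 1e-4, 1e-1, log=False)
# ("lr",  (1e-4, 1e-1, "log")) -> trial.suggest_float("lr", 1e-4, 1e-1, log=True)
# ("lr",  (1e-4, 1e-1, 20))    -> n_points ignored; linear suggest_float as above
# ("n",   (1, 100, 50, "log")) -> trial.suggest_int("n", 1, 100, log=True)
```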

def _objective(self, trial):
"""Objective function for Optuna optimization.

@@ -109,7 +190,7 @@ def _objective(self, trial):
float
The objective value
"""
- params = self._suggest_params(trial, self.param_space)
+ params = self._suggest_params(trial, self._resolved_param_space)
score = self.experiment(params)

# Handle early stopping based on max_score
@@ -157,6 +238,9 @@ def _solve(self, experiment, param_space, n_trials, **kwargs):
"""
import optuna

# Store resolved param_space for use in _objective
self._resolved_param_space = param_space

# Create optimizer with random state if provided
optimizer = self._get_optimizer()

29 changes: 28 additions & 1 deletion src/hyperactive/opt/_adapters/_gfo.py
@@ -23,6 +23,11 @@ class _BaseGFOadapter(BaseOptimizer):
_tags = {
"authors": "SimonBlanke",
"python_dependencies": ["gradient-free-optimizers>=1.5.0"],
# Search space capabilities
"capability:discrete": True,
"capability:continuous": False, # GFO needs lists, not (low, high) tuples
"capability:categorical": False, # GFO only supports numeric values
"capability:constraints": True,
}

def __init__(self):
@@ -55,9 +60,27 @@ def get_search_config(self):
search_config["initialize"] = self._initialize
del search_config["verbose"]

# Resolve: unified_space is converted to search_space
unified_space = search_config.pop("unified_space", None)
search_space = search_config.get("search_space")

# Validate: only one should be set
if unified_space is not None and search_space is not None:
raise ValueError(
"Provide either 'unified_space' or 'search_space', not both. "
"Use 'unified_space' for simple dict[str, list] format, "
"or 'search_space' for native GFO format."
)

# Use unified_space if search_space is not set
if unified_space is not None:
search_config["search_space"] = unified_space

search_config = self._handle_gfo_defaults(search_config)

search_config["search_space"] = self._to_dict_np(search_config["search_space"])
# Note: _to_dict_np is called in _solve(), after SearchSpaceAdapter processes
# continuous tuples. If we convert here, tuples like (1e-4, 1e-1, "log")
# would become numpy arrays with strings before the adapter can discretize them.

return search_config
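Mirroring the Optuna adapter, a hedged usage sketch (the concrete GFO optimizer class is hypothetical):

```python
# Both spellings feed the same "search_space" kwarg downstream;
# supplying both raises ValueError in get_search_config().
opt = SomeGFOOptimizer(unified_space={"n_estimators": [50, 100, 200]},
                       experiment=experiment)
opt = SomeGFOOptimizer(search_space={"n_estimators": [50, 100, 200]},
                       experiment=experiment)
```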

@@ -130,6 +153,10 @@ def _solve(self, experiment, **search_config):
n_iter = search_config.pop("n_iter", 100)
max_time = search_config.pop("max_time", None)

# Convert search_space lists to numpy arrays (GFO requirement)
# This must happen after SearchSpaceAdapter has processed continuous tuples
search_config["search_space"] = self._to_dict_np(search_config["search_space"])

gfo_cls = self._get_gfo_class()
gfopt = gfo_cls(**search_config)
