diff --git a/docs/source/_snippets/user_guide/optimizers.py b/docs/source/_snippets/user_guide/optimizers.py index 463addcd..edebc9e9 100644 --- a/docs/source/_snippets/user_guide/optimizers.py +++ b/docs/source/_snippets/user_guide/optimizers.py @@ -225,6 +225,117 @@ def objective(params): # [end:optuna_tpe] +# ============================================================================ +# Scipy Backend +# ============================================================================ + +# [start:scipy_imports] +from hyperactive.opt.scipy import ( + ScipyDifferentialEvolution, # Global: population-based + ScipyDualAnnealing, # Global: simulated annealing variant + ScipyBasinhopping, # Global: random perturbations + local search + ScipySHGO, # Global: finds multiple local minima + ScipyDirect, # Global: deterministic DIRECT algorithm + ScipyNelderMead, # Local: simplex-based + ScipyPowell, # Local: conjugate direction method +) +# [end:scipy_imports] + + +# Scipy uses continuous search spaces (tuples instead of arrays) +scipy_search_space = { + "x": (-5.0, 5.0), + "y": (-5.0, 5.0), +} + + +# [start:scipy_differential_evolution] +from hyperactive.opt.scipy import ScipyDifferentialEvolution + +optimizer = ScipyDifferentialEvolution( + param_space=scipy_search_space, + n_iter=100, + experiment=objective, + strategy="best1bin", + random_state=42, +) +# [end:scipy_differential_evolution] + + +# [start:scipy_dual_annealing] +from hyperactive.opt.scipy import ScipyDualAnnealing + +optimizer = ScipyDualAnnealing( + param_space=scipy_search_space, + n_iter=100, + experiment=objective, + random_state=42, +) +# [end:scipy_dual_annealing] + + +# [start:scipy_basinhopping] +from hyperactive.opt.scipy import ScipyBasinhopping + +optimizer = ScipyBasinhopping( + param_space=scipy_search_space, + n_iter=50, + experiment=objective, + minimizer_method="Nelder-Mead", + random_state=42, +) +# [end:scipy_basinhopping] + + +# [start:scipy_shgo] +from hyperactive.opt.scipy import ScipySHGO + +optimizer = ScipySHGO( + param_space=scipy_search_space, + n_iter=3, + experiment=objective, + n=50, + sampling_method="simplicial", +) +# [end:scipy_shgo] + + +# [start:scipy_direct] +from hyperactive.opt.scipy import ScipyDirect + +optimizer = ScipyDirect( + param_space=scipy_search_space, + n_iter=200, + experiment=objective, + locally_biased=True, +) +# [end:scipy_direct] + + +# [start:scipy_nelder_mead] +from hyperactive.opt.scipy import ScipyNelderMead + +optimizer = ScipyNelderMead( + param_space=scipy_search_space, + n_iter=200, + experiment=objective, + random_state=42, +) +# [end:scipy_nelder_mead] + + +# [start:scipy_powell] +from hyperactive.opt.scipy import ScipyPowell + +optimizer = ScipyPowell( + param_space=scipy_search_space, + n_iter=200, + experiment=objective, + random_state=42, +) +# [end:scipy_powell] + + # ============================================================================ # Configuration Examples # ============================================================================ diff --git a/docs/source/api_reference/optimizers/index.rst b/docs/source/api_reference/optimizers/index.rst index 75a0b879..060bcc2f 100644 --- a/docs/source/api_reference/optimizers/index.rst +++ b/docs/source/api_reference/optimizers/index.rst @@ -8,7 +8,7 @@ The :mod:`hyperactive.opt` module contains optimization algorithms for hyperpara All optimizers inherit from :class:`~hyperactive.base.BaseOptimizer` and share the same interface: the ``solve()`` method to run optimization, and configuration via the ``experiment`` 
and ``search_space`` parameters. -Hyperactive provides optimizers from three backends: +Hyperactive provides optimizers from four backends: .. list-table:: :widths: 25 75 @@ -20,6 +20,8 @@ Hyperactive provides optimizers from three backends: - Native gradient-free optimization algorithms (21 optimizers) * - :doc:`optuna` - Interface to Optuna's samplers (8 optimizers) + * - :doc:`scipy` + - Scipy.optimize algorithms for continuous spaces (7 optimizers) * - :doc:`sklearn` - sklearn-compatible search interfaces (2 optimizers) @@ -28,4 +30,5 @@ Hyperactive provides optimizers from three backends: gfo optuna + scipy sklearn diff --git a/docs/source/api_reference/optimizers/scipy.rst b/docs/source/api_reference/optimizers/scipy.rst new file mode 100644 index 00000000..b75db130 --- /dev/null +++ b/docs/source/api_reference/optimizers/scipy.rst @@ -0,0 +1,37 @@ +.. _optimizers_scipy_ref: + +Scipy +===== + +.. currentmodule:: hyperactive.opt + +The Scipy backend provides an interface to `scipy.optimize `_ +algorithms for continuous parameter optimization. + +.. note:: + + Scipy optimizers only support **continuous parameter spaces** (tuples). + For discrete or categorical parameters, use GFO or Optuna backends. + +Global Optimizers +----------------- + +.. autosummary:: + :toctree: ../auto_generated/ + :template: class.rst + + ScipyDifferentialEvolution + ScipyDualAnnealing + ScipyBasinhopping + ScipySHGO + ScipyDirect + +Local Optimizers +---------------- + +.. autosummary:: + :toctree: ../auto_generated/ + :template: class.rst + + ScipyNelderMead + ScipyPowell diff --git a/docs/source/examples.rst b/docs/source/examples.rst index c8b88c45..22fd6cb7 100644 --- a/docs/source/examples.rst +++ b/docs/source/examples.rst @@ -18,6 +18,7 @@ on GitHub. examples/population_based examples/sequential_model_based examples/optuna_backend + examples/scipy_backend examples/sklearn_backend examples/integrations examples/other @@ -61,6 +62,10 @@ Backend Examples Examples using Optuna's samplers including TPE, CMA-ES, NSGA-II/III, and Gaussian Process optimization. +:ref:`examples_scipy_backend` + Examples using scipy.optimize algorithms including Differential Evolution, + Dual Annealing, Basin-hopping, SHGO, DIRECT, Nelder-Mead, and Powell. + :ref:`examples_sklearn_backend` Scikit-learn compatible interfaces as drop-in replacements for GridSearchCV and RandomizedSearchCV. diff --git a/docs/source/examples/scipy_backend.rst b/docs/source/examples/scipy_backend.rst new file mode 100644 index 00000000..6585d441 --- /dev/null +++ b/docs/source/examples/scipy_backend.rst @@ -0,0 +1,104 @@ +.. _examples_scipy_backend: + +============= +Scipy Backend +============= + +Hyperactive provides wrappers for scipy.optimize algorithms, enabling +well-tested, production-grade optimization for continuous parameter spaces. + +.. note:: + + Scipy must be installed separately: + + .. code-block:: bash + + pip install scipy + # or + pip install hyperactive[all_extras] + + +Available Optimizers +-------------------- + +The Scipy backend provides 7 optimizers divided into global and local methods. + +**Global Optimizers** (5 algorithms): + +.. list-table:: + :header-rows: 1 + :widths: 30 70 + + * - Optimizer + - Description + * - ``ScipyDifferentialEvolution`` + - Population-based global optimizer. Robust for multi-modal landscapes. + * - ``ScipyDualAnnealing`` + - Combines classical simulated annealing with local search. + * - ``ScipyBasinhopping`` + - Random perturbations with local minimization. Good for finding global minima. 
+ * - ``ScipySHGO`` + - Simplicial Homology Global Optimization. Finds multiple local minima. + * - ``ScipyDirect`` + - Deterministic DIRECT algorithm. No random seed required. + +**Local Optimizers** (2 algorithms): + +.. list-table:: + :header-rows: 1 + :widths: 30 70 + + * - Optimizer + - Description + * - ``ScipyNelderMead`` + - Simplex-based optimizer. Fast for smooth functions. + * - ``ScipyPowell`` + - Conjugate direction method. Often faster than Nelder-Mead. + + +Quick Example +------------- + +Scipy optimizers require continuous parameter spaces defined as tuples: + +.. code-block:: python + + from hyperactive.opt.scipy import ScipyDifferentialEvolution + + # Define a continuous search space (tuples, not arrays) + param_space = { + "x": (-5.0, 5.0), + "y": (-5.0, 5.0), + } + + def objective(params): + x, y = params["x"], params["y"] + return -(x**2 + y**2) # Maximize (minimize negative) + + optimizer = ScipyDifferentialEvolution( + param_space=param_space, + n_iter=100, + experiment=objective, + random_state=42, + ) + + best_params = optimizer.solve() + print(f"Best parameters: {best_params}") + + +When to Use Scipy Backend +------------------------- + +The Scipy backend is useful when: + +- **Continuous parameters only**: Your search space has no categorical or discrete values +- **Production-grade algorithms**: You need well-tested, reliable implementations +- **Specific scipy features**: You want scipy's differential evolution or simulated annealing +- **Deterministic optimization**: Use ``ScipyDirect`` for reproducible results without random seeds + + +See Also +-------- + +- :ref:`user_guide_optimizers_scipy` - Detailed guide with all optimizer examples +- :ref:`optimizers_scipy_ref` - API reference for all Scipy optimizers diff --git a/docs/source/user_guide/optimizers/index.rst b/docs/source/user_guide/optimizers/index.rst index 4babba3b..aa8ba400 100644 --- a/docs/source/user_guide/optimizers/index.rst +++ b/docs/source/user_guide/optimizers/index.rst @@ -4,7 +4,7 @@ Optimizers ========== -Hyperactive provides 31 algorithms across 5 categories and 3 backends. +Hyperactive provides 38 algorithms across 5 categories and 4 backends. Optimizers navigate the search space to find optimal parameters. Each implements a different strategy for balancing exploration (trying diverse regions) and exploitation (refining promising solutions). Local search methods like Hill Climbing work well for @@ -20,10 +20,10 @@ Algorithm Landscape
-      Hyperactive optimizer taxonomy showing 31 algorithms across GFO, Optuna, and sklearn backends
+      Hyperactive optimizer taxonomy showing 38 algorithms across GFO, Optuna, Scipy, and sklearn backends
-      Hyperactive optimizer taxonomy showing 31 algorithms across GFO, Optuna, and sklearn backends
+      Hyperactive optimizer taxonomy showing 38 algorithms across GFO, Optuna, Scipy, and sklearn backends
@@ -133,6 +133,17 @@ Algorithm Categories *TPEOptimizer, CmaEsOptimizer, GPOptimizer, NSGAIIOptimizer, and more* + .. grid-item-card:: Scipy Backend + :link: scipy + :link-type: doc + :class-card: sd-border-secondary + + **7 algorithms** + ^^^ + Scipy.optimize algorithms for continuous parameter spaces. + + *DifferentialEvolution, DualAnnealing, Basinhopping, SHGO, Direct, NelderMead, Powell* + ---- Scenario Reference @@ -163,8 +174,11 @@ Detailed recommendations based on problem characteristics: - ``GridSearch`` - Exhaustive coverage when feasible * - Continuous parameters - - ``BayesianOptimizer``, ``CmaEsOptimizer`` + - ``BayesianOptimizer``, ``CmaEsOptimizer``, ``ScipyDifferentialEvolution`` - Designed for smooth, continuous spaces + * - Continuous only (scipy) + - ``ScipyDualAnnealing``, ``ScipyBasinhopping``, ``ScipyNelderMead`` + - Production-grade scipy.optimize implementations * - Mixed parameter types - ``TPEOptimizer``, ``RandomSearch`` - Handle categorical + continuous well @@ -191,4 +205,5 @@ All optimizers share common parameters and configuration options. population_based sequential_model_based optuna + scipy configuration diff --git a/docs/source/user_guide/optimizers/scipy.rst b/docs/source/user_guide/optimizers/scipy.rst new file mode 100644 index 00000000..6441989e --- /dev/null +++ b/docs/source/user_guide/optimizers/scipy.rst @@ -0,0 +1,117 @@ +.. _user_guide_optimizers_scipy: + +============= +Scipy Backend +============= + +Hyperactive provides wrappers for scipy.optimize algorithms, offering well-tested +implementations for continuous parameter optimization. Scipy optimizers support +only continuous parameter spaces defined as tuples. + + +Available Optimizers +-------------------- + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_imports] + :end-before: # [end:scipy_imports] + + +Example: ScipyDifferentialEvolution +----------------------------------- + +A robust global optimizer using differential evolution. Handles multi-modal +objective functions well: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_differential_evolution] + :end-before: # [end:scipy_differential_evolution] + + +Example: ScipyDualAnnealing +--------------------------- + +Combines classical simulated annealing with local search. Effective for +problems with many local minima: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_dual_annealing] + :end-before: # [end:scipy_dual_annealing] + + +Example: ScipyBasinhopping +-------------------------- + +Global optimization combining random perturbations with local refinement. Good for +finding global minima in multimodal landscapes: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_basinhopping] + :end-before: # [end:scipy_basinhopping] + + +Example: ScipySHGO +------------------ + +Simplicial Homology Global Optimization. Finds multiple local minima and is +effective for low to moderate dimensional problems: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_shgo] + :end-before: # [end:scipy_shgo] + + +Example: ScipyDirect +-------------------- + +Deterministic global optimizer using the DIRECT (DIviding RECTangles) algorithm. +Requires no random seed and is effective for Lipschitz-continuous functions: + +.. 
literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_direct] + :end-before: # [end:scipy_direct] + + +Example: ScipyNelderMead +------------------------ + +A simplex-based local optimizer. Fast convergence for smooth objective functions: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_nelder_mead] + :end-before: # [end:scipy_nelder_mead] + + +Example: ScipyPowell +-------------------- + +Powell's conjugate direction method. A fast local optimizer that can outperform +Nelder-Mead in some cases: + +.. literalinclude:: ../../_snippets/user_guide/optimizers.py + :language: python + :start-after: # [start:scipy_powell] + :end-before: # [end:scipy_powell] + + +When to Use Scipy Backend +------------------------- + +The Scipy backend is useful when: + +- Your parameter space is purely continuous (no categorical or discrete values) +- You want well-tested, production-grade optimization algorithms +- You need specific scipy algorithms not available in other backends +- You prefer scipy's implementation of differential evolution or simulated annealing + +Choose ``ScipyDifferentialEvolution`` for robust global optimization. +Choose ``ScipyDualAnnealing`` for problems with many local minima. +Choose ``ScipyBasinhopping`` for global optimization with local refinement. +Choose ``ScipyNelderMead`` or ``ScipyPowell`` for fast local optimization. diff --git a/src/hyperactive/opt/__init__.py b/src/hyperactive/opt/__init__.py index da303a23..cb8dde18 100644 --- a/src/hyperactive/opt/__init__.py +++ b/src/hyperactive/opt/__init__.py @@ -38,6 +38,15 @@ RandomOptimizer, TPEOptimizer, ) +from .scipy import ( + ScipyBasinhopping, + ScipyDifferentialEvolution, + ScipyDirect, + ScipyDualAnnealing, + ScipyNelderMead, + ScipyPowell, + ScipySHGO, +) __all__ = [ "GridSearchSk", @@ -71,4 +80,11 @@ "NSGAIIOptimizer", "NSGAIIIOptimizer", "QMCOptimizer", + "ScipyBasinhopping", + "ScipyDifferentialEvolution", + "ScipyDirect", + "ScipyDualAnnealing", + "ScipyNelderMead", + "ScipyPowell", + "ScipySHGO", ] diff --git a/src/hyperactive/opt/_adapters/__init__.py b/src/hyperactive/opt/_adapters/__init__.py index 6e40d407..4683e4b3 100644 --- a/src/hyperactive/opt/_adapters/__init__.py +++ b/src/hyperactive/opt/_adapters/__init__.py @@ -1,7 +1,9 @@ """Adapters for individual packages.""" + # copyright: hyperactive developers, MIT License (see LICENSE file) from ._base_optuna_adapter import _BaseOptunaAdapter from ._gfo import _BaseGFOadapter +from ._base_scipy_adapter import _BaseScipyAdapter -__all__ = ["_BaseOptunaAdapter", "_BaseGFOadapter"] +__all__ = ["_BaseOptunaAdapter", "_BaseGFOadapter", "_BaseScipyAdapter"] diff --git a/src/hyperactive/opt/_adapters/_base_scipy_adapter.py b/src/hyperactive/opt/_adapters/_base_scipy_adapter.py new file mode 100644 index 00000000..7e7756ef --- /dev/null +++ b/src/hyperactive/opt/_adapters/_base_scipy_adapter.py @@ -0,0 +1,331 @@ +"""Base adapter for scipy.optimize optimizers.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +import time + +import numpy as np + +from hyperactive.base import BaseOptimizer + +__all__ = ["_BaseScipyAdapter"] + + +class _BaseScipyAdapter(BaseOptimizer): + """Base adapter class for scipy.optimize optimizers. + + This adapter handles the conversion between Hyperactive's interface and + scipy's optimization functions. 
Key responsibilities: + + * Search space conversion to scipy bounds format + * Score negation (scipy minimizes, Hyperactive maximizes) + * Array-to-dict parameter conversion + * Random state handling + * Time-based early stopping via callbacks + + Extension interface for subclasses: + + * ``_get_scipy_func``: Return the scipy optimization function + * ``_get_optimizer_kwargs``: Return optimizer-specific kwargs + * ``_get_iteration_param_name``: Return the parameter name for iterations + + Notes + ----- + Scipy optimizers are designed for continuous optimization. This adapter + only supports continuous parameter spaces (tuples). For discrete or + categorical parameters, use the optuna or gfo backends instead. + + Scipy minimizes objectives, while Hyperactive maximizes (higher scores + are better). This adapter negates scores when calling scipy functions. + """ + + _tags = { + "python_dependencies": ["scipy"], + "info:name": "Scipy-based optimizer", + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + experiment=None, + ): + self.param_space = param_space + self.n_iter = n_iter + self.max_time = max_time + self.initialize = initialize + self.random_state = random_state + self.experiment = experiment + super().__init__() + + def _get_scipy_func(self): + """Get the scipy optimization function to use. + + Returns + ------- + callable + The scipy optimization function. Must be a function from + ``scipy.optimize``. + + Raises + ------ + NotImplementedError + If not implemented by subclass. + """ + raise NotImplementedError( + "Subclasses must implement _get_scipy_func to return " + "the scipy optimization function." + ) + + def _get_optimizer_kwargs(self): + """Get optimizer-specific keyword arguments. + + Override this method in subclasses to pass algorithm-specific + parameters to the scipy optimization function. + + Returns + ------- + dict + Keyword arguments to pass to the optimizer. + Default is an empty dict. + """ + return {} + + def _get_iteration_param_name(self): + """Get the parameter name used for iteration control. + + Different scipy optimizers use different parameter names: + - differential_evolution: maxiter + - dual_annealing: maxiter + - basinhopping: niter + - shgo: iters + - direct: maxfun + + Returns + ------- + str + The parameter name for iteration control. + Default is "maxiter". + """ + return "maxiter" + + def _convert_to_scipy_space(self, param_space): + """Convert Hyperactive parameter space to scipy bounds format. + + Validates that all parameters are continuous (tuples) and converts + to scipy's bounds format: list of (low, high) tuples. + + Parameters + ---------- + param_space : dict[str, tuple] + The parameter space to convert. Keys are parameter names, + values must be tuples of (low, high) for continuous ranges. + + Returns + ------- + bounds : list of tuple + Scipy-compatible bounds as [(low, high), ...]. + param_names : list of str + Parameter names in the order matching bounds. + + Raises + ------ + ValueError + If parameter space contains non-tuple values (lists, arrays). 
+ + Examples + -------- + >>> adapter = _BaseScipyAdapter() + >>> space = {"x": (0.0, 1.0), "y": (-5.0, 5.0)} + >>> bounds, names = adapter._convert_to_scipy_space(space) + >>> bounds + [(0.0, 1.0), (-5.0, 5.0)] + >>> names + ['x', 'y'] + """ + bounds = [] + param_names = [] + + for key, space in param_space.items(): + if isinstance(space, tuple) and len(space) == 2: + low, high = space + bounds.append((float(low), float(high))) + param_names.append(key) + elif isinstance(space, (list, np.ndarray)): + raise ValueError( + f"Scipy optimizers only support continuous parameter spaces. " + f"Parameter '{key}' has discrete values (list/array). " + f"Use optuna or gfo backends for discrete/categorical parameters." + ) + else: + raise ValueError( + f"Unsupported parameter space type for '{key}': {type(space)}. " + f"Expected tuple (low, high) for continuous range." + ) + + return bounds, param_names + + def _array_to_dict(self, x_array, param_names): + """Convert scipy array to Hyperactive parameter dictionary. + + Parameters + ---------- + x_array : np.ndarray + Array of parameter values from scipy optimizer. + param_names : list of str + Parameter names in order matching x_array. + + Returns + ------- + dict + Parameter dictionary with names as keys. + """ + return dict(zip(param_names, x_array)) + + def _get_x0_from_initialize(self, bounds, param_names): + """Extract initial point from initialize configuration. + + Parameters + ---------- + bounds : list of tuple + Scipy bounds as [(low, high), ...]. + param_names : list of str + Parameter names in order. + + Returns + ------- + np.ndarray or None + Initial point if warm_start provided, else None. + """ + if self.initialize is None: + return None + + if not isinstance(self.initialize, dict): + return None + + warm_start = self.initialize.get("warm_start") + if warm_start is None or not isinstance(warm_start, list): + return None + + if len(warm_start) == 0: + return None + + # Use first warm start point + point = warm_start[0] + x0 = np.array([point.get(name, (b[0] + b[1]) / 2) + for name, b in zip(param_names, bounds)]) + return x0 + + def _create_callback(self, start_time, max_time): + """Create a callback for time-based early stopping. + + Parameters + ---------- + start_time : float + Start time from time.time(). + max_time : float or None + Maximum time in seconds, or None for no limit. + + Returns + ------- + callable or None + Callback function that returns True to stop, or None. + """ + if max_time is None: + return None + + def callback(*args, **kwargs): + elapsed = time.time() - start_time + return elapsed > max_time + + return callback + + def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs): + """Run the scipy optimization. + + Parameters + ---------- + experiment : BaseExperiment + The experiment to optimize. + param_space : dict + The parameter space to search. + n_iter : int + Number of iterations. + max_time : float, optional + Maximum time in seconds. + **kwargs + Additional parameters (unused, for compatibility). + + Returns + ------- + dict + Best parameters found during optimization. 
+ """ + # Convert search space + bounds, param_names = self._convert_to_scipy_space(param_space) + + # Create objective function (negated for minimization) + def objective(x): + params = self._array_to_dict(x, param_names) + score = experiment(params) + return -score # Negate for scipy minimization + + # Get scipy function and kwargs + scipy_func = self._get_scipy_func() + opt_kwargs = self._get_optimizer_kwargs() + + # Set iteration parameter + iter_param = self._get_iteration_param_name() + opt_kwargs[iter_param] = n_iter + + # Set random state if provided + if self.random_state is not None and "seed" not in opt_kwargs: + opt_kwargs["seed"] = self.random_state + + # Set up callback for time limit + start_time = time.time() + callback = self._create_callback(start_time, max_time) + if callback is not None: + opt_kwargs["callback"] = callback + + # Get initial point from warm start if available + x0 = self._get_x0_from_initialize(bounds, param_names) + if x0 is not None and "x0" not in opt_kwargs: + opt_kwargs["x0"] = x0 + + # Run optimization + result = scipy_func(objective, bounds, **opt_kwargs) + + # Extract best parameters + best_params = self._array_to_dict(result.x, param_names) + self.best_score_ = -result.fun # Negate back to maximization + + return best_params + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. + """ + from hyperactive.experiment.bench import Ackley + + ackley_exp = Ackley.create_test_instance() + + # Test with continuous ranges + params_continuous = { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 20, + "experiment": ackley_exp, + } + + return [params_continuous] diff --git a/src/hyperactive/opt/scipy/__init__.py b/src/hyperactive/opt/scipy/__init__.py new file mode 100644 index 00000000..c8801ece --- /dev/null +++ b/src/hyperactive/opt/scipy/__init__.py @@ -0,0 +1,26 @@ +"""Scipy optimization backend for Hyperactive. + +This module provides optimizers from scipy.optimize for continuous +parameter optimization. + +Note: Scipy optimizers only support continuous parameter spaces (tuples). +For discrete or categorical parameters, use optuna or gfo backends. +""" + +from ._basinhopping import ScipyBasinhopping +from ._differential_evolution import ScipyDifferentialEvolution +from ._direct import ScipyDirect +from ._dual_annealing import ScipyDualAnnealing +from ._nelder_mead import ScipyNelderMead +from ._powell import ScipyPowell +from ._shgo import ScipySHGO + +__all__ = [ + "ScipyBasinhopping", + "ScipyDifferentialEvolution", + "ScipyDirect", + "ScipyDualAnnealing", + "ScipyNelderMead", + "ScipyPowell", + "ScipySHGO", +] diff --git a/src/hyperactive/opt/scipy/_basinhopping.py b/src/hyperactive/opt/scipy/_basinhopping.py new file mode 100644 index 00000000..11aac849 --- /dev/null +++ b/src/hyperactive/opt/scipy/_basinhopping.py @@ -0,0 +1,283 @@ +"""Basin-hopping optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +import time + +import numpy as np + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyBasinhopping"] + + +class ScipyBasinhopping(_BaseScipyAdapter): + """Scipy Basin-hopping optimizer. + + Basin-hopping is a global optimization algorithm that combines random + perturbations with local minimization. 
It is effective for: + + * Finding global minima in multimodal landscapes + * Problems where local optimization is efficient + * Continuous optimization problems + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. + Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Number of basin-hopping iterations (hops). + + max_time : float, optional + Maximum optimization time in seconds. + + initialize : dict, optional + Initialization configuration. Supports: + + * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with + known good configurations (uses first point as x0) + + random_state : int, optional + Random seed for reproducibility. + + minimizer_method : str, default="Nelder-Mead" + Local minimization method. Derivative-free options: + + * ``"Nelder-Mead"``: Simplex algorithm (default, recommended) + * ``"Powell"``: Direction set method + * ``"L-BFGS-B"``: Limited-memory BFGS with bounds + * ``"COBYLA"``: Constrained optimization + + T : float, default=1.0 + Temperature for the Metropolis acceptance criterion. + Higher values increase acceptance of worse solutions. + + stepsize : float, default=0.5 + Initial step size for random perturbations. + + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipyDualAnnealing : Simulated annealing approach. + ScipyDifferentialEvolution : Population-based optimizer. + + References + ---------- + .. [1] Wales, D. J., & Doye, J. P. K. (1997). Global optimization by + basin-hopping and the lowest energy structures of Lennard-Jones + clusters. The Journal of Physical Chemistry A, 101(28), 5111-5116. + + Examples + -------- + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyBasinhopping + + >>> ackley = Ackley.create_test_instance() + >>> optimizer = ScipyBasinhopping( + ... param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}, + ... n_iter=50, + ... minimizer_method="Nelder-Mead", + ... random_state=42, + ... experiment=ackley, + ... ) + >>> best_params = optimizer.solve() # doctest: +SKIP + """ + + _tags = { + "info:name": "Scipy Basin-hopping", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "mixed", + "info:compute": "middle", + "python_dependencies": ["scipy"], + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + minimizer_method="Nelder-Mead", + T=1.0, + stepsize=0.5, + experiment=None, + ): + self.minimizer_method = minimizer_method + self.T = T + self.stepsize = stepsize + + super().__init__( + param_space=param_space, + n_iter=n_iter, + max_time=max_time, + initialize=initialize, + random_state=random_state, + experiment=experiment, + ) + + def _get_scipy_func(self): + """Get the basinhopping function. + + Returns + ------- + callable + The ``scipy.optimize.basinhopping`` function. + """ + from scipy.optimize import basinhopping + + return basinhopping + + def _get_iteration_param_name(self): + """Get iteration parameter name. + + Returns + ------- + str + "niter" for basinhopping. + """ + return "niter" + + def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs): + """Run the basin-hopping optimization. + + Overrides base class to handle basinhopping's different API. 
+ + Parameters + ---------- + experiment : BaseExperiment + The experiment to optimize. + param_space : dict + The parameter space to search. + n_iter : int + Number of basin-hopping iterations. + max_time : float, optional + Maximum time in seconds. + **kwargs + Additional parameters. + + Returns + ------- + dict + Best parameters found. + """ + from scipy.optimize import basinhopping + + # Convert search space + bounds, param_names = self._convert_to_scipy_space(param_space) + + # Create objective function (negated for minimization) + def objective(x): + params = self._array_to_dict(x, param_names) + score = experiment(params) + return -score + + # Get initial point + x0 = self._get_x0_from_initialize(bounds, param_names) + if x0 is None: + # Random initial point within bounds + rng = np.random.RandomState(self.random_state) + x0 = np.array([rng.uniform(low, high) for low, high in bounds]) + + # Set up minimizer kwargs with bounds + minimizer_kwargs = { + "method": self.minimizer_method, + "bounds": bounds, + } + + # Set up callback for time limit + start_time = time.time() + + def callback(x, f, accept): + if max_time is not None: + return time.time() - start_time > max_time + return False + + # Run optimization + result = basinhopping( + objective, + x0, + niter=n_iter, + T=self.T, + stepsize=self.stepsize, + minimizer_kwargs=minimizer_kwargs, + callback=callback, + seed=self.random_state, + ) + + # Extract best parameters + best_params = self._array_to_dict(result.x, param_names) + self.best_score_ = -result.fun + + return best_params + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. + """ + from hyperactive.experiment.bench import Ackley + + params = [] + + ackley_exp = Ackley.create_test_instance() + + # Test 1: Default configuration (Nelder-Mead) + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 10, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 2: Powell minimizer + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 10, + "minimizer_method": "Powell", + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 3: Higher temperature + params.append( + { + "param_space": { + "x0": (-3.0, 3.0), + "x1": (-3.0, 3.0), + }, + "n_iter": 15, + "T": 2.0, + "stepsize": 0.8, + "experiment": ackley_exp, + "random_state": 123, + } + ) + + return params diff --git a/src/hyperactive/opt/scipy/_differential_evolution.py b/src/hyperactive/opt/scipy/_differential_evolution.py new file mode 100644 index 00000000..2f6b9587 --- /dev/null +++ b/src/hyperactive/opt/scipy/_differential_evolution.py @@ -0,0 +1,245 @@ +"""Differential Evolution optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyDifferentialEvolution"] + + +class ScipyDifferentialEvolution(_BaseScipyAdapter): + """Scipy Differential Evolution optimizer. + + Differential Evolution is a stochastic population-based optimization + algorithm. 
It is particularly effective for: + + * Global optimization over continuous spaces + * Non-differentiable and noisy objective functions + * Problems with many local minima + * Parallel evaluation scenarios + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. + Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Maximum number of generations (iterations). + + max_time : float, optional + Maximum optimization time in seconds. + + initialize : dict, optional + Initialization configuration. Supports: + + * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with + known good configurations (uses first point as x0) + + random_state : int, optional + Random seed for reproducibility. + + strategy : str, default="best1bin" + Differential evolution strategy. Options include: + + * ``"best1bin"``: Best member with 1 difference vector, binomial crossover + * ``"best1exp"``: Best member with 1 difference vector, exponential crossover + * ``"rand1exp"``: Random member with 1 difference vector, exponential + * ``"randtobest1exp"``: Random-to-best with 1 difference vector + * ``"best2exp"``: Best with 2 difference vectors + * ``"rand2exp"``: Random with 2 difference vectors + * ``"currenttobest1bin"``: Current-to-best, binomial + + mutation : tuple or float, default=(0.5, 1.0) + Mutation constant (F). If tuple (min, max), dithering is used. + Typical range: [0.5, 2.0]. + + recombination : float, default=0.7 + Crossover probability (CR). Range: [0, 1]. + + popsize : int, default=15 + Population size multiplier. Total population = popsize * dimensions. + + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipyDualAnnealing : Simulated annealing variant. + ScipyBasinhopping : Global optimization with local refinement. + + References + ---------- + .. [1] Storn, R., & Price, K. (1997). Differential evolution - a simple + and efficient heuristic for global optimization over continuous + spaces. Journal of global optimization, 11(4), 341-359. + + Examples + -------- + Basic usage with a benchmark function: + + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyDifferentialEvolution + + Create a benchmark experiment: + + >>> ackley = Ackley.create_test_instance() + + Configure the optimizer: + + >>> optimizer = ScipyDifferentialEvolution( + ... param_space={ + ... "x0": (-5.0, 5.0), + ... "x1": (-5.0, 5.0), + ... }, + ... n_iter=100, + ... strategy="best1bin", + ... random_state=42, + ... experiment=ackley, + ... 
) + + Run optimization: + + >>> best_params = optimizer.solve() # doctest: +SKIP + """ + + _tags = { + "info:name": "Scipy Differential Evolution", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "mixed", + "info:compute": "low", + "python_dependencies": ["scipy"], + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + strategy="best1bin", + mutation=(0.5, 1.0), + recombination=0.7, + popsize=15, + experiment=None, + ): + self.strategy = strategy + self.mutation = mutation + self.recombination = recombination + self.popsize = popsize + + super().__init__( + param_space=param_space, + n_iter=n_iter, + max_time=max_time, + initialize=initialize, + random_state=random_state, + experiment=experiment, + ) + + def _get_scipy_func(self): + """Get the differential_evolution function. + + Returns + ------- + callable + The ``scipy.optimize.differential_evolution`` function. + """ + from scipy.optimize import differential_evolution + + return differential_evolution + + def _get_iteration_param_name(self): + """Get iteration parameter name. + + Returns + ------- + str + "maxiter" for differential_evolution. + """ + return "maxiter" + + def _get_optimizer_kwargs(self): + """Get differential evolution specific arguments. + + Returns + ------- + dict + Configuration arguments for differential_evolution. + """ + kwargs = { + "strategy": self.strategy, + "mutation": self.mutation, + "recombination": self.recombination, + "popsize": self.popsize, + } + return kwargs + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. + """ + from hyperactive.experiment.bench import Ackley + + params = [] + + ackley_exp = Ackley.create_test_instance() + + # Test 1: Default configuration + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 20, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 2: Custom strategy and mutation + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 20, + "strategy": "rand1bin", + "mutation": 0.8, + "recombination": 0.9, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 3: Larger population + params.append( + { + "param_space": { + "x0": (-3.0, 3.0), + "x1": (-3.0, 3.0), + }, + "n_iter": 30, + "popsize": 20, + "experiment": ackley_exp, + "random_state": 123, + } + ) + + return params diff --git a/src/hyperactive/opt/scipy/_direct.py b/src/hyperactive/opt/scipy/_direct.py new file mode 100644 index 00000000..28c980df --- /dev/null +++ b/src/hyperactive/opt/scipy/_direct.py @@ -0,0 +1,253 @@ +"""DIRECT (DIviding RECTangles) optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyDirect"] + + +class ScipyDirect(_BaseScipyAdapter): + """Scipy DIRECT (DIviding RECTangles) optimizer. + + DIRECT is a deterministic derivative-free global optimization algorithm. + It is effective for: + + * Problems where deterministic behavior is required + * Lipschitz-continuous objective functions + * Low to moderate dimensional problems + * Finding approximate global optima efficiently + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. 
+ Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Maximum number of function evaluations. + + max_time : float, optional + Maximum optimization time in seconds (not supported by DIRECT). + + initialize : dict, optional + Initialization configuration (not used by DIRECT). + + random_state : int, optional + Random seed (not used, DIRECT is deterministic). + + eps : float, default=1e-4 + Minimal required difference of the objective function values + between the current best and potential global minima. + + locally_biased : bool, default=True + If True, use locally biased DIRECT (more local refinement). + If False, use original DIRECT (more global exploration). + + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipySHGO : Another deterministic global optimizer. + ScipyDifferentialEvolution : Stochastic global optimizer. + + References + ---------- + .. [1] Jones, D. R., Perttunen, C. D., & Stuckman, B. E. (1993). + Lipschitzian optimization without the Lipschitz constant. + Journal of optimization Theory and Applications, 79(1), 157-181. + + Examples + -------- + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyDirect + + >>> ackley = Ackley.create_test_instance() + >>> optimizer = ScipyDirect( + ... param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}, + ... n_iter=200, + ... experiment=ackley, + ... ) + >>> best_params = optimizer.solve() # doctest: +SKIP + """ + + _tags = { + "info:name": "Scipy DIRECT", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "explore", + "info:compute": "low", + "python_dependencies": ["scipy"], + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + eps=1e-4, + locally_biased=True, + experiment=None, + ): + self.eps = eps + self.locally_biased = locally_biased + + super().__init__( + param_space=param_space, + n_iter=n_iter, + max_time=max_time, + initialize=initialize, + random_state=random_state, + experiment=experiment, + ) + + def _get_scipy_func(self): + """Get the direct function. + + Returns + ------- + callable + The ``scipy.optimize.direct`` function. + """ + from scipy.optimize import direct + + return direct + + def _get_iteration_param_name(self): + """Get iteration parameter name. + + Returns + ------- + str + "maxfun" for direct (controls function evaluations). + """ + return "maxfun" + + def _get_optimizer_kwargs(self): + """Get DIRECT specific arguments. + + Returns + ------- + dict + Configuration arguments for direct. + """ + kwargs = { + "eps": self.eps, + "locally_biased": self.locally_biased, + } + return kwargs + + def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs): + """Run the DIRECT optimization. + + Overrides base class to handle DIRECT's different API + (no seed, no callback, no x0). + + Parameters + ---------- + experiment : BaseExperiment + The experiment to optimize. + param_space : dict + The parameter space to search. + n_iter : int + Maximum number of function evaluations. + max_time : float, optional + Maximum time (not supported by DIRECT). + **kwargs + Additional parameters. + + Returns + ------- + dict + Best parameters found. 
+ """ + from scipy.optimize import direct + + # Convert search space + bounds, param_names = self._convert_to_scipy_space(param_space) + + # Create objective function (negated for minimization) + def objective(x): + params = self._array_to_dict(x, param_names) + score = experiment(params) + return -score + + # Run optimization + result = direct( + objective, + bounds, + eps=self.eps, + maxfun=n_iter, + locally_biased=self.locally_biased, + ) + + # Extract best parameters + best_params = self._array_to_dict(result.x, param_names) + self.best_score_ = -result.fun + + return best_params + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. + """ + from hyperactive.experiment.bench import Ackley + + params = [] + + ackley_exp = Ackley.create_test_instance() + + # Test 1: Default configuration (locally biased) + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 100, + "experiment": ackley_exp, + } + ) + + # Test 2: Original DIRECT (not locally biased) + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 100, + "locally_biased": False, + "experiment": ackley_exp, + } + ) + + # Test 3: Higher precision + params.append( + { + "param_space": { + "x0": (-3.0, 3.0), + "x1": (-3.0, 3.0), + }, + "n_iter": 150, + "eps": 1e-6, + "experiment": ackley_exp, + } + ) + + return params diff --git a/src/hyperactive/opt/scipy/_dual_annealing.py b/src/hyperactive/opt/scipy/_dual_annealing.py new file mode 100644 index 00000000..2d076984 --- /dev/null +++ b/src/hyperactive/opt/scipy/_dual_annealing.py @@ -0,0 +1,231 @@ +"""Dual Annealing optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyDualAnnealing"] + + +class ScipyDualAnnealing(_BaseScipyAdapter): + """Scipy Dual Annealing optimizer. + + Dual Annealing combines Classical Simulated Annealing with a fast + local search method. It is effective for: + + * Global optimization with many local minima + * Continuous optimization problems + * Problems where local refinement improves solutions + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. + Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Maximum number of global iterations. + + max_time : float, optional + Maximum optimization time in seconds. + + initialize : dict, optional + Initialization configuration. Supports: + + * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with + known good configurations (uses first point as x0) + + random_state : int, optional + Random seed for reproducibility. + + initial_temp : float, default=5230.0 + Initial temperature for the annealing schedule. + + restart_temp_ratio : float, default=2e-5 + When temperature falls below ``initial_temp * restart_temp_ratio``, + the annealing restarts. + + visit : float, default=2.62 + Parameter for the visiting distribution. Higher values lead to + heavier tails (more global exploration). + + accept : float, default=-5.0 + Parameter for the acceptance distribution. More negative values + make acceptance stricter. + + no_local_search : bool, default=False + If True, disable local search refinement. 
+ + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipyDifferentialEvolution : Population-based global optimizer. + ScipyBasinhopping : Another global-local hybrid approach. + + References + ---------- + .. [1] Tsallis, C. (1988). Possible generalization of Boltzmann-Gibbs + statistics. Journal of statistical physics, 52(1-2), 479-487. + + .. [2] Xiang, Y., et al. (2013). Generalized simulated annealing for + global optimization. Science, 220(4598), 671-680. + + Examples + -------- + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyDualAnnealing + + >>> ackley = Ackley.create_test_instance() + >>> optimizer = ScipyDualAnnealing( + ... param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}, + ... n_iter=100, + ... random_state=42, + ... experiment=ackley, + ... ) + >>> best_params = optimizer.solve() # doctest: +SKIP + """ + + _tags = { + "info:name": "Scipy Dual Annealing", + "info:local_vs_global": "global", + "info:explore_vs_exploit": "mixed", + "info:compute": "low", + "python_dependencies": ["scipy"], + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + initial_temp=5230.0, + restart_temp_ratio=2e-5, + visit=2.62, + accept=-5.0, + no_local_search=False, + experiment=None, + ): + self.initial_temp = initial_temp + self.restart_temp_ratio = restart_temp_ratio + self.visit = visit + self.accept = accept + self.no_local_search = no_local_search + + super().__init__( + param_space=param_space, + n_iter=n_iter, + max_time=max_time, + initialize=initialize, + random_state=random_state, + experiment=experiment, + ) + + def _get_scipy_func(self): + """Get the dual_annealing function. + + Returns + ------- + callable + The ``scipy.optimize.dual_annealing`` function. + """ + from scipy.optimize import dual_annealing + + return dual_annealing + + def _get_iteration_param_name(self): + """Get iteration parameter name. + + Returns + ------- + str + "maxiter" for dual_annealing. + """ + return "maxiter" + + def _get_optimizer_kwargs(self): + """Get dual annealing specific arguments. + + Returns + ------- + dict + Configuration arguments for dual_annealing. + """ + kwargs = { + "initial_temp": self.initial_temp, + "restart_temp_ratio": self.restart_temp_ratio, + "visit": self.visit, + "accept": self.accept, + "no_local_search": self.no_local_search, + } + return kwargs + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. 
+ """ + from hyperactive.experiment.bench import Ackley + + params = [] + + ackley_exp = Ackley.create_test_instance() + + # Test 1: Default configuration + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 50, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 2: No local search + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 50, + "no_local_search": True, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 3: Custom temperature settings + params.append( + { + "param_space": { + "x0": (-3.0, 3.0), + "x1": (-3.0, 3.0), + }, + "n_iter": 30, + "initial_temp": 10000.0, + "visit": 2.8, + "experiment": ackley_exp, + "random_state": 123, + } + ) + + return params diff --git a/src/hyperactive/opt/scipy/_nelder_mead.py b/src/hyperactive/opt/scipy/_nelder_mead.py new file mode 100644 index 00000000..f1057d3e --- /dev/null +++ b/src/hyperactive/opt/scipy/_nelder_mead.py @@ -0,0 +1,259 @@ +"""Nelder-Mead optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +import numpy as np + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyNelderMead"] + + +class ScipyNelderMead(_BaseScipyAdapter): + """Scipy Nelder-Mead simplex optimizer. + + Nelder-Mead is a derivative-free local optimization algorithm that uses + a simplex to explore the search space. It is effective for: + + * Local optimization and fine-tuning + * Low-dimensional problems (typically < 10 dimensions) + * Smooth objective functions + * Problems where derivatives are unavailable + + Note: This is a local optimizer. For global optimization, consider + using it with warm_start from a global optimizer's result. + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. + Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Maximum number of function evaluations. + + max_time : float, optional + Maximum optimization time in seconds. + + initialize : dict, optional + Initialization configuration. Supports: + + * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with + known good configurations (uses first point as x0) + + random_state : int, optional + Random seed for initial point generation (if no warm_start). + + xatol : float, default=1e-4 + Absolute error in parameter values for convergence. + + fatol : float, default=1e-4 + Absolute error in objective function for convergence. + + adaptive : bool, default=True + Adapt algorithm parameters to dimensionality. + + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipyPowell : Another derivative-free local optimizer. + ScipyBasinhopping : Global optimizer with local refinement. + + References + ---------- + .. [1] Nelder, J. A., & Mead, R. (1965). A simplex method for function + minimization. The computer journal, 7(4), 308-313. + + Examples + -------- + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyNelderMead + + >>> ackley = Ackley.create_test_instance() + >>> optimizer = ScipyNelderMead( + ... param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}, + ... n_iter=200, + ... random_state=42, + ... 
experiment=ackley, + ... ) + >>> best_params = optimizer.solve() # doctest: +SKIP + """ + + _tags = { + "info:name": "Scipy Nelder-Mead", + "info:local_vs_global": "local", + "info:explore_vs_exploit": "exploit", + "info:compute": "low", + "python_dependencies": ["scipy"], + } + + def __init__( + self, + param_space=None, + n_iter=100, + max_time=None, + initialize=None, + random_state=None, + xatol=1e-4, + fatol=1e-4, + adaptive=True, + experiment=None, + ): + self.xatol = xatol + self.fatol = fatol + self.adaptive = adaptive + + super().__init__( + param_space=param_space, + n_iter=n_iter, + max_time=max_time, + initialize=initialize, + random_state=random_state, + experiment=experiment, + ) + + def _get_scipy_func(self): + """Get the minimize function. + + Returns + ------- + callable + The ``scipy.optimize.minimize`` function. + """ + from scipy.optimize import minimize + + return minimize + + def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs): + """Run the Nelder-Mead optimization. + + Overrides base class to use scipy.optimize.minimize with + method='Nelder-Mead'. + + Parameters + ---------- + experiment : BaseExperiment + The experiment to optimize. + param_space : dict + The parameter space to search. + n_iter : int + Maximum number of function evaluations. + max_time : float, optional + Maximum time in seconds. + **kwargs + Additional parameters. + + Returns + ------- + dict + Best parameters found. + """ + from scipy.optimize import minimize + + # Convert search space + bounds, param_names = self._convert_to_scipy_space(param_space) + + # Create objective function (negated for minimization) + def objective(x): + params = self._array_to_dict(x, param_names) + score = experiment(params) + return -score + + # Get initial point + x0 = self._get_x0_from_initialize(bounds, param_names) + if x0 is None: + # Random initial point within bounds + rng = np.random.RandomState(self.random_state) + x0 = np.array([rng.uniform(low, high) for low, high in bounds]) + + # Set up options + options = { + "maxfev": n_iter, + "xatol": self.xatol, + "fatol": self.fatol, + "adaptive": self.adaptive, + } + + # Run optimization + result = minimize( + objective, + x0, + method="Nelder-Mead", + bounds=bounds, + options=options, + ) + + # Extract best parameters + best_params = self._array_to_dict(result.x, param_names) + self.best_score_ = -result.fun + + return best_params + + @classmethod + def get_test_params(cls, parameter_set="default"): + """Return testing parameter settings for the optimizer. + + Returns + ------- + list of dict + List of parameter configurations for testing. 
+ """ + from hyperactive.experiment.bench import Ackley + + params = [] + + ackley_exp = Ackley.create_test_instance() + + # Test 1: Default configuration + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 100, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 2: Tighter tolerances + params.append( + { + "param_space": { + "x0": (-5.0, 5.0), + "x1": (-5.0, 5.0), + }, + "n_iter": 200, + "xatol": 1e-6, + "fatol": 1e-6, + "experiment": ackley_exp, + "random_state": 42, + } + ) + + # Test 3: Non-adaptive + params.append( + { + "param_space": { + "x0": (-3.0, 3.0), + "x1": (-3.0, 3.0), + }, + "n_iter": 150, + "adaptive": False, + "experiment": ackley_exp, + "random_state": 123, + } + ) + + return params diff --git a/src/hyperactive/opt/scipy/_powell.py b/src/hyperactive/opt/scipy/_powell.py new file mode 100644 index 00000000..dc6b92ac --- /dev/null +++ b/src/hyperactive/opt/scipy/_powell.py @@ -0,0 +1,253 @@ +"""Powell optimizer from scipy.optimize.""" + +# copyright: hyperactive developers, MIT License (see LICENSE file) + +import numpy as np + +from hyperactive.opt._adapters import _BaseScipyAdapter + +__all__ = ["ScipyPowell"] + + +class ScipyPowell(_BaseScipyAdapter): + """Scipy Powell's conjugate direction method optimizer. + + Powell's method is a derivative-free local optimization algorithm that + searches along conjugate directions. It is effective for: + + * Local optimization and fine-tuning + * Moderate dimensional problems + * Problems where derivatives are unavailable + * Faster convergence than Nelder-Mead in some cases + + Note: This is a local optimizer. For global optimization, consider + using it with warm_start from a global optimizer's result. + + Parameters + ---------- + param_space : dict[str, tuple] + The search space to explore. Dictionary with parameter names as keys. + Values must be tuples ``(low, high)`` for continuous ranges. + + n_iter : int, default=100 + Maximum number of function evaluations. + + max_time : float, optional + Maximum optimization time in seconds. + + initialize : dict, optional + Initialization configuration. Supports: + + * ``{"warm_start": [{"param1": val1, ...}, ...]}``: Start with + known good configurations (uses first point as x0) + + random_state : int, optional + Random seed for initial point generation (if no warm_start). + + xtol : float, default=1e-4 + Relative error in parameter values for convergence. + + ftol : float, default=1e-4 + Relative error in objective function for convergence. + + experiment : BaseExperiment, optional + The experiment to optimize. + + Attributes + ---------- + best_params_ : dict + Best parameters found after calling ``solve()``. + + best_score_ : float + Score of the best parameters found. + + See Also + -------- + ScipyNelderMead : Another derivative-free local optimizer. + ScipyBasinhopping : Global optimizer with local refinement. + + References + ---------- + .. [1] Powell, M. J. D. (1964). An efficient method for finding the + minimum of a function of several variables without calculating + derivatives. The computer journal, 7(2), 155-162. + + Examples + -------- + >>> from hyperactive.experiment.bench import Ackley + >>> from hyperactive.opt.scipy import ScipyPowell + + >>> ackley = Ackley.create_test_instance() + >>> optimizer = ScipyPowell( + ... param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}, + ... n_iter=200, + ... random_state=42, + ... experiment=ackley, + ... 
+    >>> best_params = optimizer.solve() # doctest: +SKIP
+    """
+
+    _tags = {
+        "info:name": "Scipy Powell",
+        "info:local_vs_global": "local",
+        "info:explore_vs_exploit": "exploit",
+        "info:compute": "low",
+        "python_dependencies": ["scipy"],
+    }
+
+    def __init__(
+        self,
+        param_space=None,
+        n_iter=100,
+        max_time=None,
+        initialize=None,
+        random_state=None,
+        xtol=1e-4,
+        ftol=1e-4,
+        experiment=None,
+    ):
+        self.xtol = xtol
+        self.ftol = ftol
+
+        super().__init__(
+            param_space=param_space,
+            n_iter=n_iter,
+            max_time=max_time,
+            initialize=initialize,
+            random_state=random_state,
+            experiment=experiment,
+        )
+
+    def _get_scipy_func(self):
+        """Get the minimize function.
+
+        Returns
+        -------
+        callable
+            The ``scipy.optimize.minimize`` function.
+        """
+        from scipy.optimize import minimize
+
+        return minimize
+
+    def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs):
+        """Run the Powell optimization.
+
+        Overrides base class to use scipy.optimize.minimize with
+        method='Powell'.
+
+        Parameters
+        ----------
+        experiment : BaseExperiment
+            The experiment to optimize.
+        param_space : dict
+            The parameter space to search.
+        n_iter : int
+            Maximum number of function evaluations.
+        max_time : float, optional
+            Maximum time in seconds.
+        **kwargs
+            Additional parameters.
+
+        Returns
+        -------
+        dict
+            Best parameters found.
+        """
+        from scipy.optimize import minimize
+
+        # Convert search space
+        bounds, param_names = self._convert_to_scipy_space(param_space)
+
+        # Create objective function (negated for minimization)
+        def objective(x):
+            params = self._array_to_dict(x, param_names)
+            score = experiment(params)
+            return -score
+
+        # Get initial point
+        x0 = self._get_x0_from_initialize(bounds, param_names)
+        if x0 is None:
+            # Random initial point within bounds
+            rng = np.random.RandomState(self.random_state)
+            x0 = np.array([rng.uniform(low, high) for low, high in bounds])
+
+        # Set up options
+        options = {
+            "maxfev": n_iter,
+            "xtol": self.xtol,
+            "ftol": self.ftol,
+        }
+
+        # Run optimization
+        result = minimize(
+            objective,
+            x0,
+            method="Powell",
+            bounds=bounds,
+            options=options,
+        )
+
+        # Extract best parameters
+        best_params = self._array_to_dict(result.x, param_names)
+        self.best_score_ = -result.fun
+
+        return best_params
+
+    @classmethod
+    def get_test_params(cls, parameter_set="default"):
+        """Return testing parameter settings for the optimizer.
+
+        Returns
+        -------
+        list of dict
+            List of parameter configurations for testing.
+        """
+        from hyperactive.experiment.bench import Ackley
+
+        params = []
+
+        ackley_exp = Ackley.create_test_instance()
+
+        # Test 1: Default configuration
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-5.0, 5.0),
+                    "x1": (-5.0, 5.0),
+                },
+                "n_iter": 100,
+                "experiment": ackley_exp,
+                "random_state": 42,
+            }
+        )
+
+        # Test 2: Tighter tolerances
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-5.0, 5.0),
+                    "x1": (-5.0, 5.0),
+                },
+                "n_iter": 200,
+                "xtol": 1e-6,
+                "ftol": 1e-6,
+                "experiment": ackley_exp,
+                "random_state": 42,
+            }
+        )
+
+        # Test 3: Different search space
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-3.0, 3.0),
+                    "x1": (-3.0, 3.0),
+                },
+                "n_iter": 150,
+                "experiment": ackley_exp,
+                "random_state": 123,
+            }
+        )
+
+        return params
diff --git a/src/hyperactive/opt/scipy/_shgo.py b/src/hyperactive/opt/scipy/_shgo.py
new file mode 100644
index 00000000..08279486
--- /dev/null
+++ b/src/hyperactive/opt/scipy/_shgo.py
@@ -0,0 +1,257 @@
+"""SHGO (Simplicial Homology Global Optimization) from scipy.optimize."""
+
+# copyright: hyperactive developers, MIT License (see LICENSE file)
+
+from hyperactive.opt._adapters import _BaseScipyAdapter
+
+__all__ = ["ScipySHGO"]
+
+
+class ScipySHGO(_BaseScipyAdapter):
+    """Scipy SHGO (Simplicial Homology Global Optimization).
+
+    SHGO is designed to find all local minima of a function, not just
+    the global minimum. It is effective for:
+
+    * Problems where finding multiple local minima is valuable
+    * Continuous optimization with bounds
+    * Low to moderate dimensional problems
+
+    Parameters
+    ----------
+    param_space : dict[str, tuple]
+        The search space to explore. Dictionary with parameter names as keys.
+        Values must be tuples ``(low, high)`` for continuous ranges.
+
+    n_iter : int, default=100
+        Number of sampling iterations.
+
+    max_time : float, optional
+        Maximum optimization time in seconds.
+
+    initialize : dict, optional
+        Initialization configuration (not used by SHGO).
+
+    random_state : int, optional
+        Random seed (not directly supported by SHGO).
+
+    n : int, default=100
+        Number of sampling points per iteration.
+
+    sampling_method : str, default="simplicial"
+        Sampling method for generating points:
+
+        * ``"simplicial"``: simplicial complex sampling (default)
+        * ``"halton"``: Halton low-discrepancy sequence
+        * ``"sobol"``: Sobol low-discrepancy sequence
+
+    experiment : BaseExperiment, optional
+        The experiment to optimize.
+
+    Attributes
+    ----------
+    best_params_ : dict
+        Best parameters found after calling ``solve()``.
+
+    best_score_ : float
+        Score of the best parameters found.
+
+    See Also
+    --------
+    ScipyDirect : Another deterministic global optimizer.
+    ScipyDifferentialEvolution : Stochastic global optimizer.
+
+    References
+    ----------
+    .. [1] Endres, S. C., Sandrock, C., & Focke, W. W. (2018). A simplicial
+       homology algorithm for Lipschitz optimisation. Journal of Global
+       Optimization, 72(2), 181-217.
+
+    Examples
+    --------
+    >>> from hyperactive.experiment.bench import Ackley
+    >>> from hyperactive.opt.scipy import ScipySHGO
+
+    >>> ackley = Ackley.create_test_instance()
+    >>> optimizer = ScipySHGO(
+    ...     param_space={"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)},
+    ...     n_iter=3,
+    ...     n=50,
+    ...     experiment=ackley,
+    ... )
+    >>> best_params = optimizer.solve() # doctest: +SKIP
+    """
+
+    _tags = {
+        "info:name": "Scipy SHGO",
+        "info:local_vs_global": "global",
+        "info:explore_vs_exploit": "explore",
+        "info:compute": "middle",
+        "python_dependencies": ["scipy"],
+    }
+
+    def __init__(
+        self,
+        param_space=None,
+        n_iter=100,
+        max_time=None,
+        initialize=None,
+        random_state=None,
+        n=100,
+        sampling_method="simplicial",
+        experiment=None,
+    ):
+        self.n = n
+        self.sampling_method = sampling_method
+
+        super().__init__(
+            param_space=param_space,
+            n_iter=n_iter,
+            max_time=max_time,
+            initialize=initialize,
+            random_state=random_state,
+            experiment=experiment,
+        )
+
+    def _get_scipy_func(self):
+        """Get the shgo function.
+
+        Returns
+        -------
+        callable
+            The ``scipy.optimize.shgo`` function.
+        """
+        from scipy.optimize import shgo
+
+        return shgo
+
+    def _get_iteration_param_name(self):
+        """Get iteration parameter name.
+
+        Returns
+        -------
+        str
+            "iters" for shgo.
+        """
+        return "iters"
+
+    def _get_optimizer_kwargs(self):
+        """Get SHGO specific arguments.
+
+        Returns
+        -------
+        dict
+            Configuration arguments for shgo.
+        """
+        kwargs = {
+            "n": self.n,
+            "sampling_method": self.sampling_method,
+        }
+        return kwargs
+
+    def _solve(self, experiment, param_space, n_iter, max_time=None, **kwargs):
+        """Run the SHGO optimization.
+
+        Overrides base class to handle SHGO's different API
+        (no ``seed`` argument).
+
+        Parameters
+        ----------
+        experiment : BaseExperiment
+            The experiment to optimize.
+        param_space : dict
+            The parameter space to search.
+        n_iter : int
+            Number of sampling iterations.
+        max_time : float, optional
+            Maximum time in seconds (not supported by SHGO).
+        **kwargs
+            Additional parameters.
+
+        Returns
+        -------
+        dict
+            Best parameters found.
+        """
+        from scipy.optimize import shgo
+
+        # Convert search space
+        bounds, param_names = self._convert_to_scipy_space(param_space)
+
+        # Create objective function (negated for minimization)
+        def objective(x):
+            params = self._array_to_dict(x, param_names)
+            score = experiment(params)
+            return -score
+
+        # Run optimization
+        result = shgo(
+            objective,
+            bounds,
+            n=self.n,
+            iters=n_iter,
+            sampling_method=self.sampling_method,
+        )
+
+        # Extract best parameters
+        best_params = self._array_to_dict(result.x, param_names)
+        self.best_score_ = -result.fun
+
+        return best_params
+
+    @classmethod
+    def get_test_params(cls, parameter_set="default"):
+        """Return testing parameter settings for the optimizer.
+
+        Returns
+        -------
+        list of dict
+            List of parameter configurations for testing.
+        """
+        from hyperactive.experiment.bench import Ackley
+
+        params = []
+
+        ackley_exp = Ackley.create_test_instance()
+
+        # Test 1: Default configuration
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-5.0, 5.0),
+                    "x1": (-5.0, 5.0),
+                },
+                "n_iter": 2,
+                "n": 30,
+                "experiment": ackley_exp,
+            }
+        )
+
+        # Test 2: Halton sampling
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-5.0, 5.0),
+                    "x1": (-5.0, 5.0),
+                },
+                "n_iter": 2,
+                "n": 30,
+                "sampling_method": "halton",
+                "experiment": ackley_exp,
+            }
+        )
+
+        # Test 3: More sampling points
+        params.append(
+            {
+                "param_space": {
+                    "x0": (-3.0, 3.0),
+                    "x1": (-3.0, 3.0),
+                },
+                "n_iter": 3,
+                "n": 50,
+                "experiment": ackley_exp,
+            }
+        )
+
+        return params
diff --git a/src/hyperactive/tests/test_all_objects.py b/src/hyperactive/tests/test_all_objects.py
index 559af35d..3858552e 100644
--- a/src/hyperactive/tests/test_all_objects.py
+++ b/src/hyperactive/tests/test_all_objects.py
@@ -358,6 +358,7 @@ def test_selection_direction_backend(self, object_instance):
         """
         # Import backend bases to check optimizer type
         from hyperactive.opt._adapters._base_optuna_adapter import _BaseOptunaAdapter
+        from hyperactive.opt._adapters._base_scipy_adapter import _BaseScipyAdapter
         from hyperactive.opt._adapters._gfo import _BaseGFOadapter
         from hyperactive.opt.gridsearch._sk import GridSearchSk
         from hyperactive.opt.random_search import RandomSearchSk
@@ -460,5 +461,55 @@ def _assert_good(best_params):
             _assert_good(best_params)
             return None
 
+        # Scipy adapters: use continuous space (scipy doesn't support discrete)
+        if isinstance(object_instance, _BaseScipyAdapter):
+            # Scipy requires continuous bounds, so use a small search space
+            # around the known good point (0, 0)
+            continuous_space = {"x0": (-1.0, 1.0), "x1": (-1.0, 1.0)}
+            continuous_cfg = _cfg_with_space(object_instance, exp, continuous_space)
+            inst = object_instance.clone().set_params(
+                **{
+                    **continuous_cfg,
+                    "n_iter": 50,
+                    "random_state": 0,
+                }
+            )
+            best_params = inst.solve()
+            # Scipy should find params close to (0, 0) which is the optimum
+            # Allow some tolerance since scipy uses continuous optimization
+            assert isinstance(best_params, dict)
+            assert abs(best_params["x0"]) < 0.5, (
+                f"Scipy optimizer should find x0 close to 0, got {best_params['x0']}"
+            )
+            assert abs(best_params["x1"]) < 0.5, (
+                f"Scipy optimizer should find x1 close to 0, got {best_params['x1']}"
+            )
+            return None
+
         # For other backends, no-op here; targeted direction tests live elsewhere
         return None
+
+    def test_scipy_discrete_param_error(self, object_instance):
+        """Test that scipy optimizers raise clear error for discrete parameters."""
+        from hyperactive.opt._adapters._base_scipy_adapter import _BaseScipyAdapter
+
+        if not isinstance(object_instance, _BaseScipyAdapter):
+            return None
+
+        import pytest
+
+        from hyperactive.experiment.bench import Ackley
+
+        exp = Ackley(d=2)
+
+        # Try to use discrete space (list) which scipy doesn't support
+        discrete_space = {"x0": [0.0, 1.0, 2.0], "x1": [0.0, 1.0, 2.0]}
+
+        inst = object_instance.clone().set_params(
+            param_space=discrete_space,
+            n_iter=10,
+            experiment=exp,
+        )
+
+        with pytest.raises(ValueError, match="only support continuous"):
+            inst.solve()
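Editor's note: the optimizers above delegate search-space handling to helpers on _BaseScipyAdapter (_convert_to_scipy_space, _array_to_dict, _get_x0_from_initialize), which live in src/hyperactive/opt/_adapters/_base_scipy_adapter.py and are not part of this diff. The standalone sketch below illustrates what the first two helpers are assumed to do, including the continuous-space validation that test_scipy_discrete_param_error exercises; the function names mirror the calls above, but the bodies and exact error wording are assumptions, not the actual adapter code.

# Illustrative sketch only -- not the real _BaseScipyAdapter implementation.
# Assumed behavior: tuples (low, high) map to scipy bounds, anything else
# (e.g. a list of discrete values) raises the "only support continuous" error
# that the new test matches on.

def _convert_to_scipy_space(param_space):
    """Map {"name": (low, high)} entries to scipy-style bounds."""
    bounds, param_names = [], []
    for name, space in param_space.items():
        if not (isinstance(space, tuple) and len(space) == 2):
            raise ValueError(
                "Scipy optimizers only support continuous (low, high) tuples; "
                f"got {space!r} for parameter {name!r}."
            )
        bounds.append((float(space[0]), float(space[1])))
        param_names.append(name)
    return bounds, param_names

def _array_to_dict(x, param_names):
    """Convert a scipy solution vector back into a parameter dict."""
    return {name: float(value) for name, value in zip(param_names, x)}

# Continuous space, as used throughout this diff -> valid
print(_convert_to_scipy_space({"x0": (-5.0, 5.0), "x1": (-5.0, 5.0)}))
# ([(-5.0, 5.0), (-5.0, 5.0)], ['x0', 'x1'])

# Discrete space, as used in test_scipy_discrete_param_error -> ValueError
# _convert_to_scipy_space({"x0": [0.0, 1.0, 2.0], "x1": [0.0, 1.0, 2.0]})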