Skip to content

gds_psuu.optimizers

Search strategy implementations.

Base

Abstract base class for optimizers.

Optimizer

Bases: ABC

Base class for parameter search optimizers.

Subclasses implement the suggest/observe loop. The optimizer is stateful and mutable — it tracks which points have been evaluated and uses that information to decide what to try next.

Source code in packages/gds-psuu/gds_psuu/optimizers/base.py
class Optimizer(ABC):
    """Base class for parameter search optimizers.

    Subclasses implement the suggest/observe loop. The optimizer is stateful
    and mutable — it tracks which points have been evaluated and uses that
    information to decide what to try next.

    Expected lifecycle: call ``setup()`` once, then alternate ``suggest()``
    and ``observe()`` until ``is_exhausted()`` returns True.
    """

    @abstractmethod
    def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
        """Initialize the optimizer with the search space and KPI names."""

    @abstractmethod
    def suggest(self) -> ParamPoint:
        """Suggest the next parameter point to evaluate."""

    @abstractmethod
    def observe(self, params: ParamPoint, scores: KPIScores) -> None:
        """Record the result of evaluating a parameter point."""

    @abstractmethod
    def is_exhausted(self) -> bool:
        """Return True if no more suggestions are available."""

setup(space, kpi_names) abstractmethod

Initialize the optimizer with the search space and KPI names.

Source code in packages/gds-psuu/gds_psuu/optimizers/base.py
@abstractmethod
def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
    """Initialize the optimizer with the search space and KPI names.

    Args:
        space: The parameter space to draw suggestions from.
        kpi_names: Names of the KPIs that will appear in observed scores.
    """

suggest() abstractmethod

Suggest the next parameter point to evaluate.

Source code in packages/gds-psuu/gds_psuu/optimizers/base.py
@abstractmethod
def suggest(self) -> ParamPoint:
    """Suggest the next parameter point to evaluate.

    Behavior once the optimizer is exhausted is implementation-defined;
    callers should check ``is_exhausted()`` before calling.
    """

observe(params, scores) abstractmethod

Record the result of evaluating a parameter point.

Source code in packages/gds-psuu/gds_psuu/optimizers/base.py
@abstractmethod
def observe(self, params: ParamPoint, scores: KPIScores) -> None:
    """Record the result of evaluating a parameter point.

    Non-adaptive implementations (e.g. grid or random search) may make
    this a no-op.
    """

is_exhausted() abstractmethod

Return True if no more suggestions are available.

Source code in packages/gds-psuu/gds_psuu/optimizers/base.py
@abstractmethod
def is_exhausted(self) -> bool:
    """Return True if no more suggestions are available.

    The driving loop should stop calling ``suggest()`` once this is True.
    """

Grid search optimizer — exhaustive cartesian product search.

GridSearchOptimizer

Bases: Optimizer

Evaluates every point in a regular grid over the parameter space.

For Continuous dimensions, n_steps evenly spaced values are used. For Integer dimensions, all integers in [min, max] are used. For Discrete dimensions, all values are used.

Source code in packages/gds-psuu/gds_psuu/optimizers/grid.py
class GridSearchOptimizer(Optimizer):
    """Evaluates every point in a regular grid over the parameter space.

    For Continuous dimensions, ``n_steps`` evenly spaced values are used.
    For Integer dimensions, all integers in [min, max] are used.
    For Discrete dimensions, all values are used.
    """

    def __init__(self, n_steps: int = 5) -> None:
        """Create a grid search with ``n_steps`` values per continuous dimension.

        Raises:
            ValueError: If ``n_steps`` is less than 1.
        """
        # Validate eagerly so a bad configuration fails at construction,
        # not deep inside setup()/suggest().
        if n_steps < 1:
            raise ValueError(f"n_steps must be >= 1, got {n_steps}")
        self._n_steps = n_steps
        self._grid: list[ParamPoint] = []
        self._cursor: int = 0

    def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
        """Materialize the full grid; ``kpi_names`` is unused (non-adaptive)."""
        self._grid = space.grid_points(self._n_steps)
        self._cursor = 0

    def suggest(self) -> ParamPoint:
        """Return the next unvisited grid point.

        Raises:
            IndexError: If the grid is exhausted — check ``is_exhausted()``
                before calling. (Explicit guard instead of an opaque
                list-index failure.)
        """
        if self._cursor >= len(self._grid):
            raise IndexError(
                "Grid is exhausted; check is_exhausted() before calling suggest()"
            )
        point = self._grid[self._cursor]
        self._cursor += 1
        return point

    def observe(self, params: ParamPoint, scores: KPIScores) -> None:
        """No-op: grid search does not adapt to observations."""

    def is_exhausted(self) -> bool:
        """Return True once every grid point has been suggested."""
        return self._cursor >= len(self._grid)

Random search optimizer — uniform random sampling.

RandomSearchOptimizer

Bases: Optimizer

Samples parameter points uniformly at random.

Uses stdlib random.Random for reproducibility — no numpy required. When the parameter space has constraints, uses rejection sampling with a configurable retry limit.

Source code in packages/gds-psuu/gds_psuu/optimizers/random.py
class RandomSearchOptimizer(Optimizer):
    """Samples parameter points uniformly at random.

    Uses stdlib ``random.Random`` for reproducibility — no numpy required.
    When the parameter space has constraints, uses rejection sampling
    with a configurable retry limit.
    """

    def __init__(self, n_samples: int = 20, seed: int | None = None) -> None:
        self._n_samples = n_samples
        self._rng = random.Random(seed)
        self._space: ParameterSpace | None = None
        self._count: int = 0

    def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
        self._space = space
        self._count = 0

    def _sample_point(self) -> ParamPoint:
        assert self._space is not None
        point: ParamPoint = {}
        for name, dim in self._space.params.items():
            if isinstance(dim, Continuous):
                point[name] = self._rng.uniform(dim.min_val, dim.max_val)
            elif isinstance(dim, Integer):
                point[name] = self._rng.randint(dim.min_val, dim.max_val)
            elif isinstance(dim, Discrete):
                point[name] = self._rng.choice(dim.values)
        return point

    def suggest(self) -> ParamPoint:
        assert self._space is not None, "Call setup() before suggest()"
        if not self._space.constraints:
            point = self._sample_point()
            self._count += 1
            return point

        for _ in range(_MAX_REJECTION_RETRIES):
            point = self._sample_point()
            if self._space.is_feasible(point):
                self._count += 1
                return point

        raise PsuuSearchError(
            f"Could not find a feasible point after {_MAX_REJECTION_RETRIES} "
            "random samples. The feasible region may be too small."
        )

    def observe(self, params: ParamPoint, scores: KPIScores) -> None:
        pass  # Random search doesn't adapt

    def is_exhausted(self) -> bool:
        return self._count >= self._n_samples

Bayesian (optional)

Bayesian optimizer — wraps optuna (optional dependency).

BayesianOptimizer

Bases: Optimizer

Bayesian optimization using optuna's TPE sampler.

Requires optuna. Install with:

uv add gds-psuu[bayesian]

Optimizes a single target KPI (by default the first one registered).

Source code in packages/gds-psuu/gds_psuu/optimizers/bayesian.py
class BayesianOptimizer(Optimizer):
    """Bayesian optimization using optuna's TPE sampler.

    Requires ``optuna``. Install with::

        uv add gds-psuu[bayesian]

    Optimizes a single target KPI (by default the first one registered).
    """

    def __init__(
        self,
        n_trials: int = 20,
        target_kpi: str | None = None,
        maximize: bool = True,
        seed: int | None = None,
    ) -> None:
        # Fail fast at construction if the optional dependency is absent,
        # rather than surfacing the problem later in setup()/suggest().
        if not _HAS_OPTUNA:  # pragma: no cover
            raise ImportError(
                "optuna is required for BayesianOptimizer. "
                "Install with: uv add gds-psuu[bayesian]"
            )
        self._n_trials = n_trials
        self._target_kpi = target_kpi  # None → resolved to kpi_names[0] in setup()
        self._maximize = maximize
        self._seed = seed
        self._study: Any = None  # optuna study, created in setup()
        self._space: ParameterSpace | None = None
        self._param_names: list[str] = []
        self._count: int = 0  # number of completed (observed) trials
        self._current_trial: Any = None  # in-flight trial between ask and tell

    def setup(self, space: ParameterSpace, kpi_names: list[str]) -> None:
        """Create a fresh optuna study over the given space.

        Resolves the target KPI (defaulting to the first registered one) and
        resets the completed-trial counter.

        Raises:
            PsuuSearchError: If the configured target KPI is not in
                ``kpi_names``.
        """
        if self._target_kpi is None:
            self._target_kpi = kpi_names[0]
        elif self._target_kpi not in kpi_names:
            raise PsuuSearchError(
                f"Target KPI '{self._target_kpi}' not found in {kpi_names}"
            )

        self._space = space
        self._param_names = space.dimension_names

        # Seeded TPE sampler so suggestion sequences are reproducible.
        sampler = optuna.samplers.TPESampler(seed=self._seed)
        direction = "maximize" if self._maximize else "minimize"
        # Silence optuna's per-trial INFO log lines.
        optuna.logging.set_verbosity(optuna.logging.WARNING)
        self._study = optuna.create_study(
            direction=direction,
            sampler=sampler,
        )
        self._count = 0

    def suggest(self) -> ParamPoint:
        """Ask optuna for the next trial and map it onto the parameter space.

        Uses optuna's ask/tell interface: the trial returned by ``ask()`` is
        held in ``self._current_trial`` until the matching ``observe()``
        call reports its score.

        NOTE(review): calling suggest() twice without an intervening
        observe() silently drops the earlier trial — presumably the driver
        alternates strictly; confirm against the caller.
        """
        assert self._study is not None, "Call setup() before suggest()"
        assert self._space is not None

        self._current_trial = self._study.ask()
        point: ParamPoint = {}
        for name, dim in self._space.params.items():
            if isinstance(dim, Continuous):
                point[name] = self._current_trial.suggest_float(
                    name, dim.min_val, dim.max_val
                )
            elif isinstance(dim, Integer):
                point[name] = self._current_trial.suggest_int(
                    name, dim.min_val, dim.max_val
                )
            elif isinstance(dim, Discrete):
                point[name] = self._current_trial.suggest_categorical(
                    name, list(dim.values)
                )
        return point

    def observe(self, params: ParamPoint, scores: KPIScores) -> None:
        """Report the target-KPI score for the most recent suggestion.

        ``params`` is not inspected: the score is attributed to the trial
        produced by the last ``suggest()`` call.
        """
        assert self._study is not None
        assert self._target_kpi is not None
        assert self._current_trial is not None
        value = scores[self._target_kpi]
        self._study.tell(self._current_trial, value)
        self._current_trial = None
        self._count += 1

    def is_exhausted(self) -> bool:
        """Return True once ``n_trials`` trials have been observed."""
        return self._count >= self._n_trials