
Hyperband

Hyperband (StagedIterationOptimizer)

Source code in blackboxopt/optimizers/hyperband.py
class Hyperband(StagedIterationOptimizer):
    def __init__(
        self,
        search_space: ParameterSpace,
        objective: Objective,
        min_fidelity: float,
        max_fidelity: float,
        num_iterations: int,
        eta: float = 3.0,
        seed: int = None,
        logger: logging.Logger = None,
    ):
        """Implementation of Hyperband as proposed in

        Li, L., Jamieson, K., DeSalvo, G., Rostamizadeh, A., & Talwalkar, A. (2016).
        Hyperband: A novel bandit-based approach to hyperparameter optimization.
        arXiv preprint arXiv:1603.06560.

        Args:
            search_space: The parameter space to optimize over; configurations
                are sampled from it at random.
            objective: The objective that is being optimized.
            min_fidelity: The smallest fidelity value that is still meaningful.
                Must be strictly greater than zero!
            max_fidelity: The largest fidelity value used during the optimization.
                Must not be smaller than `min_fidelity`.
            num_iterations: Number of Hyperband iterations (brackets) to run.
            eta: Scaling parameter to control the aggressiveness of Hyperband's racing.
            seed: Optional seed to make the random sampling reproducible.
            logger: Optional custom logger; a default logger is used if not provided.
        """
        self.config_sampler = RandomSearchSampler(search_space)
        self.min_fidelity = min_fidelity
        self.max_fidelity = max_fidelity
        self.eta = eta

        super().__init__(
            search_space=search_space,
            objective=objective,
            num_iterations=num_iterations,
            seed=seed,
            logger=logger,
        )

    def _create_new_iteration(self, iteration_index: int) -> StagedIteration:
        """Optimizer specific way to create a new
        `blackboxopt.optimizer.staged.iteration.StagedIteration` object
        """
        return create_hyperband_iteration(
            iteration_index,
            self.min_fidelity,
            self.max_fidelity,
            self.eta,
            self.config_sampler,
            self.objective,
            self.logger,
        )
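
The geometry of each iteration (how many configurations start a bracket and at which fidelities they are raced) follows from min_fidelity, max_fidelity, and eta. The sketch below illustrates the standard Hyperband schedule from Li et al. (2016); the helper hyperband_schedule is purely illustrative, is not part of blackboxopt, and create_hyperband_iteration may differ in details such as rounding.

import math

def hyperband_schedule(min_fidelity: float, max_fidelity: float, eta: float = 3.0):
    # Illustrative sketch of the Hyperband bracket geometry from Li et al. (2016).
    # The small epsilon guards against floating point error when the fidelity
    # ratio is an exact power of eta.
    s_max = int(math.floor(math.log(max_fidelity / min_fidelity) / math.log(eta) + 1e-9))
    brackets = []
    for s in range(s_max, -1, -1):
        # Number of configurations sampled at the start of this bracket.
        n = math.ceil((s_max + 1) * eta**s / (s + 1))
        # Fidelities of the s + 1 stages, growing by a factor of eta up to max_fidelity.
        fidelities = [max_fidelity / eta**i for i in range(s, -1, -1)]
        # Roughly the best 1/eta of the configurations survive each stage.
        num_configs = [max(int(n / eta**i), 1) for i in range(s + 1)]
        brackets.append({"num_configs": num_configs, "fidelities": fidelities})
    return brackets

# With min_fidelity=1, max_fidelity=81, and eta=3 this yields 5 brackets: the most
# aggressive one races 81 configurations starting at fidelity 1, while the last one
# evaluates 5 configurations directly at fidelity 81.
for bracket in hyperband_schedule(1, 81, 3):
    print(bracket["num_configs"], bracket["fidelities"])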

__init__(self, search_space, objective, min_fidelity, max_fidelity, num_iterations, eta=3.0, seed=None, logger=None) special

Implementation of Hyperband as proposed in

Li, L., Jamieson, K., DeSalvo, G., Rostamizadeh, A., & Talwalkar, A. (2016). Hyperband: A novel bandit-based approach to hyperparameter optimization. arXiv preprint arXiv:1603.06560.

Parameters:

    search_space (ParameterSpace, required):
        The parameter space to optimize over; configurations are sampled from it at random.
    objective (Objective, required):
        The objective that is being optimized.
    min_fidelity (float, required):
        The smallest fidelity value that is still meaningful. Must be strictly greater than zero!
    max_fidelity (float, required):
        The largest fidelity value used during the optimization. Must not be smaller than min_fidelity.
    num_iterations (int, required):
        Number of Hyperband iterations (brackets) to run.
    eta (float, default 3.0):
        Scaling parameter that controls the aggressiveness of Hyperband's racing: roughly the best
        1/eta of configurations advance to the next stage, where the fidelity is eta times larger.
    seed (int, default None):
        Optional seed to make the random sampling reproducible.
    logger (Logger, default None):
        Optional custom logger; a default logger is used if not provided.
Source code in blackboxopt/optimizers/hyperband.py
def __init__(
    self,
    search_space: ParameterSpace,
    objective: Objective,
    min_fidelity: float,
    max_fidelity: float,
    num_iterations: int,
    eta: float = 3.0,
    seed: int = None,
    logger: logging.Logger = None,
):
    """Implementation of Hyperband as proposed in

    Li, L., Jamieson, K., DeSalvo, G., Rostamizadeh, A., & Talwalkar, A. (2016).
    Hyperband: A novel bandit-based approach to hyperparameter optimization.
    arXiv preprint arXiv:1603.06560.

    Args:
        search_space: The parameter space to optimize over; configurations
            are sampled from it at random.
        objective: The objective that is being optimized.
        min_fidelity: The smallest fidelity value that is still meaningful.
            Must be strictly greater than zero!
        max_fidelity: The largest fidelity value used during the optimization.
            Must not be smaller than `min_fidelity`.
        num_iterations: Number of Hyperband iterations (brackets) to run.
        eta: Scaling parameter to control the aggressiveness of Hyperband's racing.
        seed: Optional seed to make the random sampling reproducible.
        logger: Optional custom logger; a default logger is used if not provided.
    """
    self.config_sampler = RandomSearchSampler(search_space)
    self.min_fidelity = min_fidelity
    self.max_fidelity = max_fidelity
    self.eta = eta

    super().__init__(
        search_space=search_space,
        objective=objective,
        num_iterations=num_iterations,
        seed=seed,
        logger=logger,
    )
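
For orientation, a minimal usage sketch follows. Only the Hyperband constructor signature is taken from the source above; the Objective and parameterspace construction as well as the commented evaluation loop are assumptions about the surrounding blackboxopt interface and may differ from the actual API.

import parameterspace as ps

from blackboxopt import Objective
from blackboxopt.optimizers.hyperband import Hyperband

# Assumed parameterspace construction (not shown on this page).
space = ps.ParameterSpace()
space.add(ps.ContinuousParameter(name="learning_rate", bounds=(1e-5, 1e-1)))

opt = Hyperband(
    search_space=space,
    objective=Objective(name="loss", greater_is_better=False),
    min_fidelity=1.0,   # smallest fidelity that is still meaningful, e.g. one epoch
    max_fidelity=81.0,  # full-budget fidelity, e.g. 81 epochs
    num_iterations=5,
    eta=3.0,
    seed=42,
)

# Assumed ask/tell style evaluation loop (method names are assumptions, not
# taken from this page):
# eval_spec = opt.generate_evaluation_specification()
# loss = train(eval_spec.configuration, fidelity=eval_spec.settings["fidelity"])
# opt.report(eval_spec.create_evaluation(objectives={"loss": loss}))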