blackboxopt.optimizers.testing

Tests that can be imported and used to test optimizer implementations against this package's blackbox optimizer interface.
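
Both checks return True when they pass and fail through assertions or uncaught optimizer exceptions otherwise, so they can be asserted directly from a test suite. A minimal pytest sketch; MyOptimizer and its seed keyword argument are assumptions standing in for a concrete implementation:

import pytest

from blackboxopt.optimizers.testing import (
    is_deterministic_with_fixed_seed,
    optimize_single_parameter_sequentially_for_n_max_evaluations,
)

# Hypothetical optimizer implementation; replace with your own subclass
# of blackboxopt.base.Optimizer.
from my_package import MyOptimizer


@pytest.mark.parametrize(
    "reference_test",
    [
        is_deterministic_with_fixed_seed,
        optimize_single_parameter_sequentially_for_n_max_evaluations,
    ],
)
def test_against_blackboxopt_interface(reference_test):
    # seed is an assumed constructor argument; pass whatever additional
    # arguments the optimizer's __init__ requires.
    assert reference_test(MyOptimizer, optimizer_kwargs={"seed": 42})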

is_deterministic_with_fixed_seed(optimizer_class, optimizer_kwargs)

Check if the optimizer is deterministic.

Repeatedly initialize the optimizer with the same parameter space and a fixed seed, get an evaluation specification, report a placeholder result and get another evaluation specification. The configuration of all final evaluation specifications should be equal.

Parameters:

    optimizer_class: Optimizer class to test. Required.
    optimizer_kwargs (dict): Expected to contain additional arguments for
        initializing the optimizer. (search_space and objective(s) are set
        automatically by the test.) Required.

Returns:

    bool: True if the test is passed.

Source code in blackboxopt/optimizers/testing.py
def is_deterministic_with_fixed_seed(optimizer_class, optimizer_kwargs: dict) -> bool:
    """Check if optimizer is deterministic.

    Repeatedly initialize the optimizer with the same parameter space and a fixed seed,
    get an evaluation specification, report a placeholder result and get another
    evaluation specification. The configuration of all final evaluation specifications
    should be equal.

    Args:
        optimizer_class: Optimizer to test.
        optimizer_kwargs: Expected to contain additional arguments for initializing
            the optimizer. (`search_space` and `objective(s)` are set automatically
            by the test.)

    Returns:
        `True` if the test is passed.
    """
    final_configurations = []

    for _ in range(2):
        opt = _initialize_optimizer(
            optimizer_class,
            optimizer_kwargs,
            objective=Objective("loss", False),
            objectives=[Objective("loss", False)],
        )

        es1 = opt.get_evaluation_specification()
        opt.report_evaluation(es1.get_evaluation(objectives={"loss": 0.42}))
        es2 = opt.get_evaluation_specification()

        final_configurations.append(es2.configuration.copy())

    assert final_configurations[0] == final_configurations[1]
    return True
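
The private helper _initialize_optimizer used above is not rendered on this page. Judging from its call sites, a plausible sketch might look like the following; the parameterspace-based search space and the constructor keywords are assumptions, and the actual helper in blackboxopt may differ:

import parameterspace as ps

from blackboxopt.base import MultiObjectiveOptimizer


def _initialize_optimizer(optimizer_class, optimizer_kwargs, objective, objectives):
    # Hypothetical one-parameter search space; the tests evaluate the
    # parameter "p1", so the real helper has to define it as well.
    space = ps.ParameterSpace()
    space.add(ps.ContinuousParameter(name="p1", bounds=(-1.0, 1.0)))

    # Multi-objective optimizers receive the list of objectives,
    # single-objective optimizers just the one.
    if issubclass(optimizer_class, MultiObjectiveOptimizer):
        return optimizer_class(
            search_space=space, objectives=objectives, **optimizer_kwargs
        )
    return optimizer_class(search_space=space, objective=objective, **optimizer_kwargs)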

optimize_single_parameter_sequentially_for_n_max_evaluations(optimizer_class, optimizer_kwargs, n_max_evaluations=20)

Sequentially optimize a single-parameter quadratic function.

An evaluation with unsuccessful (None) objective values is reported first, then evaluation specifications are fetched and their results are reported one at a time, until n_max_evaluations is reached or the optimizer signals OptimizationComplete.

Parameters:

    optimizer_class: Optimizer class to test; needs to be a subclass of
        blackboxopt.base.Optimizer. Required.
    optimizer_kwargs (dict): Expected to contain additional arguments for
        initializing the optimizer. (search_space and objective(s) are set
        automatically by the test.) Required.
    n_max_evaluations (int): Maximum number of evaluations to run. Default: 20.

Returns:

    bool: True if the test is passed.

Source code in blackboxopt/optimizers/testing.py
def optimize_single_parameter_sequentially_for_n_max_evaluations(
    optimizer_class, optimizer_kwargs: dict, n_max_evaluations: int = 20
) -> bool:
    """[summary]

    Args:
        optimizer_class: [description]
        optimizer_kwargs: [description]
        n_max_evaluations: [description]

    Returns:
        [description]
    """

    def quadratic_function(p1):
        return p1 ** 2

    assert issubclass(optimizer_class, Optimizer), (
        "The default test suite is only applicable for implementations of "
        "blackboxopt.base.Optimizer"
    )

    optimizer = _initialize_optimizer(
        optimizer_class,
        optimizer_kwargs,
        objective=Objective("loss", False),
        objectives=[Objective("loss", False), Objective("score", True)],
    )

    eval_spec = optimizer.get_evaluation_specification()

    # Report an evaluation whose objective values are None, as for a failed
    # evaluation, to check that the optimizer handles unsuccessful
    # evaluations gracefully.
    if issubclass(optimizer_class, MultiObjectiveOptimizer):
        optimizer.report_evaluation(
            eval_spec.get_evaluation(objectives={"loss": None, "score": None})
        )
    else:
        optimizer.report_evaluation(eval_spec.get_evaluation(objectives={"loss": None}))

    for _ in range(n_max_evaluations):

        try:
            eval_spec = optimizer.get_evaluation_specification()
        except OptimizationComplete:
            break

        loss = quadratic_function(p1=eval_spec.configuration["p1"])
        if issubclass(optimizer_class, MultiObjectiveOptimizer):
            evaluation_result = {"loss": loss, "score": -loss}
        else:
            evaluation_result = {"loss": loss}

        optimizer.report_evaluation(
            eval_spec.get_evaluation(objectives=evaluation_result)
        )

    return True
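
Like the determinism check, this test can be asserted directly. Note that when the optimizer under test subclasses MultiObjectiveOptimizer, both the "loss" objective and the negated "score" objective are reported; single-objective optimizers only receive "loss". A direct invocation sketch, again using the hypothetical MyOptimizer and its assumed seed argument:

from blackboxopt.optimizers.testing import (
    optimize_single_parameter_sequentially_for_n_max_evaluations,
)

# Hypothetical optimizer implementation.
from my_package import MyOptimizer

assert optimize_single_parameter_sequentially_for_n_max_evaluations(
    MyOptimizer,
    optimizer_kwargs={"seed": 42},  # assumed constructor argument
    n_max_evaluations=10,
)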