blackboxopt.optimization_loops.utils
evaluation_function_wrapper(evaluation_function, evaluation_specification, objectives, logger)
Wrapper for evaluation functions. The evaluation result returned by the evaluation function is checked to contain all relevant objectives. An empty evaluation with a stacktrace is reported to the optimizer in case an unhandled Exception occurs during the evaluation function call.
Source code in blackboxopt/optimization_loops/utils.py
def evaluation_function_wrapper(
    evaluation_function: Callable[[EvaluationSpecification], Evaluation],
    evaluation_specification: EvaluationSpecification,
    objectives: List[Objective],
    logger: logging.Logger,
) -> Evaluation:
    """Call `evaluation_function` on the given specification with safeguards.

    If the evaluation function raises an unhandled exception, the failure is
    logged (stacktrace at debug level) and an empty evaluation carrying the
    stacktrace — with all objective values set to `None` — is returned to be
    reported to the optimizer instead. Either way, the resulting evaluation is
    checked via `raise_on_unknown_or_incomplete_objectives` against the known
    objectives before being returned.
    """
    try:
        evaluation = evaluation_function(evaluation_specification)
    except Exception:
        trace = traceback.format_exc()
        logger.warning("Report FAILURE due to unhandled error during evaluation")
        logger.debug(trace)
        # Build a failed evaluation: every known objective is reported as None.
        evaluation = evaluation_specification.get_evaluation(
            stacktrace=trace,
            objectives={objective.name: None for objective in objectives},
        )
    raise_on_unknown_or_incomplete_objectives(
        known_objectives=objectives, reported_objectives=evaluation.objectives
    )
    return evaluation
init_max_evaluations_with_limit_logging(timeout_s, logger, max_evaluations=None)
Determine the evaluation limit for an optimization loop and log the configured run limits.
Parameters:
Name | Type | Description | Default |
---|---|---|---|
timeout_s |
float |
Wall clock time budget in seconds; `float("inf")` means no time limit. |
required |
logger |
Logger |
Logger used to announce the configured run limits. |
required |
max_evaluations |
int |
Optional upper bound on the number of evaluations; `None` means no evaluation limit. |
None |
Returns:
Type | Description |
---|---|
float |
`max_evaluations` as a float if a limit was given, otherwise `float("inf")`. |
Source code in blackboxopt/optimization_loops/utils.py
def init_max_evaluations_with_limit_logging(
    timeout_s: float, logger: logging.Logger, max_evaluations: int = None
) -> float:
    """Determine the evaluation limit for an optimization loop and log it.

    Args:
        timeout_s: Wall clock time budget in seconds. `float("inf")` means
            there is no time limit.
        logger: Logger used to announce the configured run limits.
        max_evaluations: Optional upper bound on the number of evaluations.
            NOTE(review): the annotation should be `Optional[int]` — `None`
            (and, via truthiness, `0`) means no evaluation limit.

    Returns:
        `max_evaluations` as a float when a truthy limit was given, otherwise
        `float("inf")` (i.e. only the timeout, if any, bounds the run).
    """
    # A truthy evaluation limit takes precedence over the timeout for logging
    # and for the returned value.
    if max_evaluations:
        logger.info(
            f"Starting optimization run until complete or {max_evaluations} evaluations"
        )
        return float(max_evaluations)
    if timeout_s == float("inf"):
        logger.info("Starting optimization run until complete")
    else:
        # Pretty-print the timeout as H:MM:SS for the log message.
        timeout_pretty = datetime.timedelta(seconds=timeout_s)
        logger.info(
            f"Starting optimization run until complete or {timeout_pretty} passed"
        )
    return float("inf")