Coverage for mlos_bench/mlos_bench/optimizers/one_shot_optimizer.py: 100%
15 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-05 00:36 +0000
1#
2# Copyright (c) Microsoft Corporation.
3# Licensed under the MIT License.
4#
5"""
6No-op optimizer for mlos_bench that proposes a single configuration.
7"""
9import logging
10from typing import Optional
12from mlos_bench.services.base_service import Service
13from mlos_bench.tunables.tunable_groups import TunableGroups
14from mlos_bench.optimizers.mock_optimizer import MockOptimizer
16_LOG = logging.getLogger(__name__)
class OneShotOptimizer(MockOptimizer):
    """
    Degenerate mock optimizer that suggests exactly one configuration and stops.

    Partial or full explicit configurations can be supplied via the usual
    configuration files.
    """

    # TODO: Add support for multiple explicit configs (i.e., FewShot or Manual Optimizer) - #344

    def __init__(self,
                 tunables: TunableGroups,
                 config: dict,
                 global_config: Optional[dict] = None,
                 service: Optional[Service] = None):
        """
        Create a one-shot optimizer over the given tunable parameters.

        Parameters
        ----------
        tunables : TunableGroups
            The tunable parameters the single configuration is drawn from.
        config : dict
            Optimizer configuration (forwarded verbatim to ``MockOptimizer``).
        global_config : Optional[dict]
            Experiment-wide configuration, if any.
        service : Optional[Service]
            Service facade available to the optimizer, if any.
        """
        super().__init__(tunables, config, global_config, service)
        _LOG.info("Run a single iteration for: %s", self._tunables)
        # Cap the parent MockOptimizer at exactly one suggestion.
        self._max_iter = 1  # Always run for just one iteration.

    @property
    def supports_preload(self) -> bool:
        # A one-shot run has nothing to warm-start from, so preloading
        # prior data is not supported.
        return False