Coverage for mlos_bench/mlos_bench/optimizers/one_shot_optimizer.py: 100%
18 statements
« prev ^ index » next coverage.py v7.6.8, created at 2024-11-26 00:46 +0000
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""No-op optimizer for mlos_bench that proposes a single configuration."""

import logging
from typing import Optional

from mlos_bench.optimizers.mock_optimizer import MockOptimizer
from mlos_bench.services.base_service import Service
from mlos_bench.tunables.tunable_groups import TunableGroups

_LOG = logging.getLogger(__name__)
class OneShotOptimizer(MockOptimizer):
    """
    Degenerate optimizer that emits a single suggestion and stops.

    Explicit configs (partial or full) are possible using configuration files.
    """

    def __init__(
        self,
        tunables: TunableGroups,
        config: dict,
        global_config: Optional[dict] = None,
        service: Optional[Service] = None,
    ):
        super().__init__(tunables, config, global_config, service)
        _LOG.info("Run a single iteration for: %s", self._tunables)
        # Regardless of the configured budget, cap the run at one iteration.
        self._max_suggestions = 1

    def suggest(self) -> TunableGroups:
        """Return the same (initial) suggestion on every call."""
        suggestion = super().suggest()
        # Re-arm the defaults flag so any further call starts from the
        # initial configuration again instead of advancing the mock sequence.
        self._start_with_defaults = True
        return suggestion

    @property
    def supports_preload(self) -> bool:
        # One-shot runs never load prior trial data.
        return False