Coverage for mlos_bench/mlos_bench/tests/optimizers/conftest.py: 100%
35 statements
« prev ^ index » next coverage.py v7.6.9, created at 2024-12-20 00:44 +0000
1#
2# Copyright (c) Microsoft Corporation.
3# Licensed under the MIT License.
4#
5"""Test fixtures for mlos_bench optimizers."""
7from typing import List
9import pytest
11from mlos_bench.optimizers.manual_optimizer import ManualOptimizer
12from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
13from mlos_bench.optimizers.mock_optimizer import MockOptimizer
14from mlos_bench.tests import SEED
15from mlos_bench.tunables.tunable_groups import TunableGroups
17# pylint: disable=redefined-outer-name
@pytest.fixture
def mock_configs() -> List[dict]:
    """Mock configurations of earlier experiments."""
    # (vmSize, idle, kernel_sched_migration_cost_ns, kernel_sched_latency_ns)
    rows = [
        ("Standard_B4ms", "halt", 50000, 1000000),
        ("Standard_B4ms", "halt", 40000, 2000000),
        ("Standard_B4ms", "mwait", -1, 3000000),  # -1 is a special value
        ("Standard_B2s", "mwait", 200000, 4000000),
    ]
    return [
        {
            "vmSize": vm_size,
            "idle": idle,
            "kernel_sched_migration_cost_ns": migration_cost,
            "kernel_sched_latency_ns": latency,
        }
        for (vm_size, idle, migration_cost, latency) in rows
    ]
@pytest.fixture
def mock_opt_no_defaults(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for MockOptimizer that ignores the initial configuration."""
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        "start_with_defaults": False,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a minimizing MockOptimizer."""
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def mock_opt_max(tunable_groups: TunableGroups) -> MockOptimizer:
    """Test fixture for a maximizing MockOptimizer."""
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 10,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def flaml_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core FLAML optimizer."""
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core FLAML optimizer."""
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
# FIXME: SMAC's RF model can be non-deterministic at low iterations, which are
# normally calculated as a percentage of the max_suggestions and number of
# tunable dimensions, so for now we set the initial random samples equal to the
# number of iterations and control them with a seed.

# Shared iteration budget for the SMAC test fixtures below: used for both
# "max_suggestions" and "n_random_init" so that every suggestion is a seeded
# random sample (see FIXME above).
SMAC_ITERATIONS = 10
@pytest.fixture
def smac_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a minimizing mlos_core SMAC optimizer."""
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # See the FIXME note above SMAC_ITERATIONS.
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """Test fixture for a maximizing mlos_core SMAC optimizer."""
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # See the FIXME note above SMAC_ITERATIONS.
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def manual_opt(tunable_groups: TunableGroups, mock_configs: List[dict]) -> ManualOptimizer:
    """Test fixture for a ManualOptimizer that cycles twice through mock_configs."""
    opt_config = {
        "max_cycles": 2,
        "tunable_values_cycle": mock_configs,
    }
    return ManualOptimizer(tunables=tunable_groups, service=None, config=opt_config)