Coverage for mlos_bench/mlos_bench/tests/optimizers/conftest.py: 100%
32 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-06 00:35 +0000
1#
2# Copyright (c) Microsoft Corporation.
3# Licensed under the MIT License.
4#
5"""
6Test fixtures for mlos_bench optimizers.
7"""
9from typing import List
11import pytest
13from mlos_bench.tunables.tunable_groups import TunableGroups
14from mlos_bench.optimizers.mock_optimizer import MockOptimizer
15from mlos_bench.optimizers.mlos_core_optimizer import MlosCoreOptimizer
17from mlos_bench.tests import SEED
@pytest.fixture
def mock_configs() -> List[dict]:
    """
    Mock configurations of earlier experiments.
    """
    # One row per past trial: (vmSize, idle, migration_cost_ns, latency_ns).
    rows = [
        ('Standard_B4ms', 'halt', 50000, 1000000),
        ('Standard_B4ms', 'halt', 40000, 2000000),
        ('Standard_B4ms', 'mwait', -1, 3000000),  # -1 is a special value
        ('Standard_B2s', 'mwait', 200000, 4000000),
    ]
    return [
        {
            'vmSize': vm_size,
            'idle': idle,
            'kernel_sched_migration_cost_ns': migration_cost,
            'kernel_sched_latency_ns': latency,
        }
        for (vm_size, idle, migration_cost, latency) in rows
    ]
@pytest.fixture
def mock_opt_no_defaults(tunable_groups: TunableGroups) -> MockOptimizer:
    """
    Test fixture for MockOptimizer that ignores the initial configuration.
    """
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        "start_with_defaults": False,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def mock_opt(tunable_groups: TunableGroups) -> MockOptimizer:
    """
    Test fixture for MockOptimizer.
    """
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 5,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def mock_opt_max(tunable_groups: TunableGroups) -> MockOptimizer:
    """
    Test fixture for MockOptimizer that maximizes the score.
    """
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 10,
        "seed": SEED,
    }
    return MockOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def flaml_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """
    Test fixture for mlos_core FLAML optimizer.
    """
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def flaml_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """
    Test fixture for mlos_core FLAML optimizer that maximizes the score.
    """
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": 15,
        "optimizer_type": "FLAML",
        "seed": SEED,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
# FIXME: SMAC's RF model can be non-deterministic at low iterations, which are
# normally calculated as a percentage of the max_iterations and number of
# tunable dimensions, so for now we set the initial random samples equal to the
# number of iterations and control them with a seed.
# Shared iteration budget for the SMAC fixtures below: used as both
# max_suggestions and n_random_init so SMAC stays in its seeded random phase.
SMAC_ITERATIONS = 10
@pytest.fixture
def smac_opt(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """
    Test fixture for mlos_core SMAC optimizer.
    """
    opt_config = {
        "optimization_targets": {"score": "min"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # See Above
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)
@pytest.fixture
def smac_opt_max(tunable_groups: TunableGroups) -> MlosCoreOptimizer:
    """
    Test fixture for mlos_core SMAC optimizer that maximizes the score.
    """
    opt_config = {
        "optimization_targets": {"score": "max"},
        "max_suggestions": SMAC_ITERATIONS,
        "optimizer_type": "SMAC",
        "seed": SEED,
        "output_directory": None,
        # See Above
        "n_random_init": SMAC_ITERATIONS,
        "max_ratio": 1.0,
    }
    return MlosCoreOptimizer(tunables=tunable_groups, service=None, config=opt_config)