Coverage for mlos_bench/mlos_bench/tests/tunables/tunable_definition_test.py: 100%
147 statements
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Unit tests for checking tunable definition rules."""

import json5 as json
import pytest

from mlos_bench.tunables.tunable import Tunable, TunableValueTypeName


def test_tunable_name() -> None:
    """Check that tunable name is valid."""
    with pytest.raises(ValueError):
        # ! characters are currently disallowed in tunable names
        Tunable(name="test!tunable", config={"type": "float", "range": [0, 1], "default": 0})
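

# A minimal contrasting sketch (not part of the original suite): the same config with a
# plain alphanumeric name is expected to be accepted. Assumes `Tunable.name` echoes the
# name back.
def test_tunable_name_valid_sketch() -> None:
    """Sketch: a well-formed tunable name is accepted."""
    tunable = Tunable(name="test_tunable", config={"type": "float", "range": [0, 1], "default": 0})
    assert tunable.name == "test_tunable"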


def test_categorical_required_params() -> None:
    """Check that required parameters are present for categorical tunables."""
    json_config = """
    {
        "type": "categorical",
        "values_missing": ["foo", "bar", "baz"],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_categorical_weights() -> None:
    """Instantiate a categorical tunable with weights."""
    json_config = """
    {
        "type": "categorical",
        "values": ["foo", "bar", "baz"],
        "values_weights": [25, 25, 50],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    tunable = Tunable(name="test", config=config)
    assert tunable.weights == [25, 25, 50]
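

# Illustrative sketch (not part of the original suite): the weights are positional,
# i.e. the i-th weight applies to the i-th entry of "values". Assumes `Tunable.default`
# exposes the configured default value.
def test_categorical_weights_alignment_sketch() -> None:
    """Sketch: categorical weights line up one-to-one with the configured values."""
    config = {
        "type": "categorical",
        "values": ["foo", "bar", "baz"],
        "values_weights": [25, 25, 50],
        "default": "baz",
    }
    tunable = Tunable(name="test", config=config)
    assert tunable.default == "baz"
    assert dict(zip(config["values"], tunable.weights or [])) == {"foo": 25, "bar": 25, "baz": 50}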


def test_categorical_weights_wrong_count() -> None:
    """Try to instantiate a categorical tunable with incorrect number of weights."""
    json_config = """
    {
        "type": "categorical",
        "values": ["foo", "bar", "baz"],
        "values_weights": [50, 50],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_categorical_weights_wrong_values() -> None:
    """Try to instantiate a categorical tunable with invalid weights."""
    json_config = """
    {
        "type": "categorical",
        "values": ["foo", "bar", "baz"],
        "values_weights": [-1, 50, 50],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_categorical_wrong_params() -> None:
    """Disallow range param for categorical tunables."""
    json_config = """
    {
        "type": "categorical",
        "values": ["foo", "bar", "foo"],
        "range": [0, 1],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_categorical_disallow_special_values() -> None:
    """Disallow special values for categorical tunables."""
    json_config = """
    {
        "type": "categorical",
        "values": ["foo", "bar", "foo"],
        "special": ["baz"],
        "default": "foo"
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_categorical_tunable_disallow_repeats() -> None:
    """Disallow duplicate values in categorical tunables."""
    with pytest.raises(ValueError):
        Tunable(
            name="test",
            config={
                "type": "categorical",
                "values": ["foo", "bar", "foo"],
                "default": "foo",
            },
        )


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_disallow_null_default(tunable_type: TunableValueTypeName) -> None:
    """Disallow null values as default for numerical tunables."""
    with pytest.raises(ValueError):
        Tunable(
            name=f"test_{tunable_type}",
            config={
                "type": tunable_type,
                "range": [0, 10],
                "default": None,
            },
        )


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_disallow_out_of_range(tunable_type: TunableValueTypeName) -> None:
    """Disallow out-of-range values as default for numerical tunables."""
    with pytest.raises(ValueError):
        Tunable(
            name=f"test_{tunable_type}",
            config={
                "type": tunable_type,
                "range": [0, 10],
                "default": 11,
            },
        )


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_wrong_params(tunable_type: TunableValueTypeName) -> None:
    """Disallow values param for numerical tunables."""
    with pytest.raises(ValueError):
        Tunable(
            name=f"test_{tunable_type}",
            config={
                "type": tunable_type,
                "range": [0, 10],
                "values": ["foo", "bar"],
                "default": 0,
            },
        )


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_required_params(tunable_type: TunableValueTypeName) -> None:
    """Check that the required range param is present for numerical tunables."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range_missing": [0, 10],
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name=f"test_{tunable_type}", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_invalid_range(tunable_type: TunableValueTypeName) -> None:
    """Disallow invalid range param for numerical tunables."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 10, 7],
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(AssertionError):
        Tunable(name=f"test_{tunable_type}", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_tunable_reversed_range(tunable_type: TunableValueTypeName) -> None:
    """Disallow reversed range param for numerical tunables."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [10, 0],
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name=f"test_{tunable_type}", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights(tunable_type: TunableValueTypeName) -> None:
    """Instantiate a numerical tunable with weighted special values."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [0],
        "special_weights": [0.1],
        "range_weight": 0.9,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name="test", config=config)
    assert tunable.special == [0]
    assert tunable.weights == [0.1]
    assert tunable.range_weight == 0.9


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization(tunable_type: TunableValueTypeName) -> None:
    """Instantiate a numerical tunable with quantization."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "quantization_bins": 11,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name="test", config=config)
    expected = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
    assert tunable.quantization_bins == len(expected)
    assert pytest.approx(list(tunable.quantized_values or []), 1e-8) == expected
    assert not tunable.is_log
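

# Illustrative sketch of the same quantization semantics on a coarser grid (not part of
# the original suite): "quantization_bins" counts the evenly spaced points over the
# range, so 3 bins on [0, 100] should yield the endpoints plus the midpoint.
@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization_coarse_sketch(tunable_type: TunableValueTypeName) -> None:
    """Sketch: a 3-bin quantization of [0, 100] produces [0, 50, 100]."""
    config = {
        "type": tunable_type,
        "range": [0, 100],
        "quantization_bins": 3,
        "default": 0,
    }
    tunable = Tunable(name="test", config=config)
    assert pytest.approx(list(tunable.quantized_values or []), 1e-8) == [0, 50, 100]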


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_log(tunable_type: TunableValueTypeName) -> None:
    """Instantiate a numerical tunable with log scale."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "log": true,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name="test", config=config)
    assert tunable.is_log


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_no_specials(tunable_type: TunableValueTypeName) -> None:
    """Raise an error if special_weights are specified but no special values."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special_weights": [0.1, 0.9],
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_non_normalized(tunable_type: TunableValueTypeName) -> None:
    """Instantiate a numerical tunable with non-normalized weights of the special
    values.
    """
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [-1, 0],
        "special_weights": [0, 10],
        "range_weight": 90,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    tunable = Tunable(name="test", config=config)
    assert tunable.special == [-1, 0]
    assert tunable.weights == [0, 10]  # Zero weights are ok
    assert tunable.range_weight == 90


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_wrong_count(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with incorrect number of weights."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [0],
        "special_weights": [0.1, 0.1, 0.8],
        "range_weight": 0.1,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_no_range_weight(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with special_weights but no range_weight."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [0, -1],
        "special_weights": [0.1, 0.2],
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_range_weight_no_weights(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with range_weight but no special_weights."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [0, -1],
        "range_weight": 0.3,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_range_weight_no_specials(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with range_weight but no special values."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "range_weight": 0.3,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_weights_wrong_values(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with invalid (negative) weights."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "special": [0],
        "special_weights": [-1],
        "range_weight": 10,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


@pytest.mark.parametrize("tunable_type", ["int", "float"])
def test_numerical_quantization_wrong(tunable_type: TunableValueTypeName) -> None:
    """Try to instantiate a numerical tunable with an invalid number of quantization bins."""
    json_config = f"""
    {{
        "type": "{tunable_type}",
        "range": [0, 100],
        "quantization_bins": 0,
        "default": 0
    }}
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test", config=config)


def test_bad_type() -> None:
    """Disallow bad types."""
    json_config = """
    {
        "type": "foo",
        "range": [0, 10],
        "default": 0
    }
    """
    config = json.loads(json_config)
    with pytest.raises(ValueError):
        Tunable(name="test_bad_type", config=config)