Coverage for mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py: 100%
28 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-06 00:35 +0000
1#
2# Copyright (c) Microsoft Corporation.
3# Licensed under the MIT License.
4#
5"""
6Common fixtures for mock TunableGroups.
7"""
9from typing import Any, Dict
11import pytest
13import json5 as json
15from mlos_bench.config.schemas import ConfigSchema
16from mlos_bench.tunables.covariant_group import CovariantTunableGroup
17from mlos_bench.tunables.tunable_groups import TunableGroups
19# pylint: disable=redefined-outer-name
# JSON5 config (json5 allows the // comments below) describing three covariant
# tunable groups -- "provision", "boot", and "kernel" -- each with a relative
# "cost" of switching and its tunable "params".  Parsed and schema-validated by
# the tunable_groups_config fixture below.
TUNABLE_GROUPS_JSON = """
{
    "provision": {
        "cost": 1000,
        "params": {
            "vmSize": {
                "description": "Azure VM size",
                "type": "categorical",
                "default": "Standard_B4ms",
                "values": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"]
            }
        }
    },
    "boot": {
        "cost": 300,
        "params": {
            "idle": {
                "description": "Idling method",
                "type": "categorical",
                "default": "halt",
                "values": ["halt", "mwait", "noidle"],
                "values_weights": [33, 33, 33]  // FLAML requires uniform weights
            }
        }
    },
    "kernel": {
        "cost": 1,
        "params": {
            "kernel_sched_migration_cost_ns": {
                "description": "Cost of migrating the thread to another core",
                "type": "int",
                "default": -1,
                "range": [0, 500000],
                "special": [-1, 0],
                // FLAML requires uniform weights, separately for
                // specials and switching between specials and range.
                "special_weights": [0.25, 0.25],
                "range_weight": 0.5,
                "log": false
            },
            "kernel_sched_latency_ns": {
                "description": "Initial value for the scheduler period",
                "type": "int",
                "default": 2000000,
                "range": [0, 1000000000],
                "log": false
            }
        }
    }
}
"""
@pytest.fixture
def tunable_groups_config() -> Dict[str, Any]:
    """
    Fixture that parses TUNABLE_GROUPS_JSON into a dict and validates it
    against the tunable params config schema.

    Returns
    -------
    config : Dict[str, Any]
        A validated tunable groups configuration.
    """
    config = json.loads(TUNABLE_GROUPS_JSON)
    assert isinstance(config, dict)
    ConfigSchema.TUNABLE_PARAMS.validate(config)
    return config
@pytest.fixture
def tunable_groups(tunable_groups_config: dict) -> TunableGroups:
    """
    Test fixture that builds a mock TunableGroups from the (validated)
    tunable_groups_config fixture and resets it to default values.

    Returns
    -------
    tunable_groups : TunableGroups
        A new TunableGroups object for testing.
    """
    groups = TunableGroups(tunable_groups_config)
    groups.reset()
    return groups
@pytest.fixture
def covariant_group(tunable_groups: TunableGroups) -> CovariantTunableGroup:
    """
    Test fixture that extracts the first CovariantTunableGroup from the
    tunable_groups fixture.

    Returns
    -------
    CovariantTunableGroup
    """
    groups_iter = iter(tunable_groups)
    (_name, group) = next(groups_iter)
    return group
@pytest.fixture
def mixed_numerics_tunable_groups() -> TunableGroups:
    """
    Test fixture that builds a TunableGroups with one int and one float
    tunable, for exercising numeric type conversions.

    Returns
    -------
    tunable_groups : TunableGroups
        A new TunableGroups object for testing.
    """
    config: Dict[str, Any] = {
        "mix-numerics": {
            "cost": 1,
            "params": {
                "int": {
                    "description": "An integer",
                    "type": "int",
                    "default": 0,
                    "range": [0, 100],
                },
                "float": {
                    "description": "A float",
                    "type": "float",
                    "default": 0,
                    "range": [0, 1],
                },
            },
        },
    }
    tunables = TunableGroups(config)
    tunables.reset()
    return tunables