Coverage for mlos_bench/mlos_bench/tests/tunable_groups_fixtures.py: 100% (27 statements)


#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Common fixtures for mock TunableGroups."""

from typing import Any, Dict

import json5 as json
import pytest

from mlos_bench.config.schemas import ConfigSchema
from mlos_bench.tunables.covariant_group import CovariantTunableGroup
from mlos_bench.tunables.tunable_groups import TunableGroups

# pylint: disable=redefined-outer-name

# TODO: Add quantized tunables here for better test coverage.

TUNABLE_GROUPS_JSON = """
{
    "provision": {
        "cost": 1000,
        "params": {
            "vmSize": {
                "description": "Azure VM size",
                "type": "categorical",
                "default": "Standard_B4ms",
                "values": ["Standard_B2s", "Standard_B2ms", "Standard_B4ms"]
            }
        }
    },
    "boot": {
        "cost": 300,
        "params": {
            "idle": {
                "description": "Idling method",
                "type": "categorical",
                "default": "halt",
                "values": ["halt", "mwait", "noidle"],
                "values_weights": [33, 33, 33]  // FLAML requires uniform weights
            }
        }
    },
    "kernel": {
        "cost": 1,
        "params": {
            "kernel_sched_migration_cost_ns": {
                "description": "Cost of migrating the thread to another core",
                "type": "int",
                "default": -1,
                "range": [0, 500000],
                "special": [-1, 0],
                // FLAML requires uniform weights, separately for
                // specials and switching between specials and range.
                "special_weights": [0.25, 0.25],
                "range_weight": 0.5,
                "log": false
            },
            "kernel_sched_latency_ns": {
                "description": "Initial value for the scheduler period",
                "type": "int",
                "default": 2000000,
                "range": [0, 1000000000],
                "quantization_bins": 11,
                "log": false
            }
        }
    }
}
"""
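
# Illustrative sanity-check sketch: the inline comments in TUNABLE_GROUPS_JSON
# above note that FLAML requires uniform weights, and the key paths below are
# taken directly from that string, so these checks hold by construction.
_demo_conf = json.loads(TUNABLE_GROUPS_JSON)
assert _demo_conf["boot"]["params"]["idle"]["values_weights"] == [33, 33, 33]
_demo_kernel_params = _demo_conf["kernel"]["params"]
assert _demo_kernel_params["kernel_sched_migration_cost_ns"]["special_weights"] == [0.25, 0.25]
del _demo_conf, _demo_kernel_params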


@pytest.fixture
def tunable_groups_config() -> Dict[str, Any]:
    """Fixture to get the parsed and validated tunable groups configuration."""
    conf = json.loads(TUNABLE_GROUPS_JSON)
    assert isinstance(conf, dict)
    ConfigSchema.TUNABLE_PARAMS.validate(conf)
    return conf
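
# Illustrative negative-test sketch (hypothetical test, not defined in this
# module): ConfigSchema.validate() is expected to raise on a malformed config,
# e.g., when "params" is not an object. The exact exception type is not pinned
# down here, so pytest.raises(Exception) is used as a loose stand-in.
#
# def test_tunable_groups_config_rejects_bad_input() -> None:
#     with pytest.raises(Exception):
#         ConfigSchema.TUNABLE_PARAMS.validate({"provision": {"params": 42}})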


@pytest.fixture
def tunable_groups(tunable_groups_config: dict) -> TunableGroups:
    """
    A test fixture that produces a mock TunableGroups.

    Returns
    -------
    tunable_groups : TunableGroups
        A new TunableGroups object for testing.
    """
    tunables = TunableGroups(tunable_groups_config)
    tunables.reset()
    return tunables
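
# Illustrative usage sketch (hypothetical test): a test function can request
# the fixture by parameter name. This assumes TunableGroups supports dict-like
# access by tunable name, returning the current value; the expected defaults
# come from TUNABLE_GROUPS_JSON above.
#
# def test_tunable_groups_defaults(tunable_groups: TunableGroups) -> None:
#     assert tunable_groups["vmSize"] == "Standard_B4ms"
#     assert tunable_groups["idle"] == "halt"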


@pytest.fixture
def covariant_group(tunable_groups: TunableGroups) -> CovariantTunableGroup:
    """
    Test fixture to get a CovariantTunableGroup from tunable_groups.

    Returns
    -------
    CovariantTunableGroup
    """
    (_, covariant_group) = next(iter(tunable_groups))
    return covariant_group
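
# Illustrative usage sketch (hypothetical test): iterating a TunableGroups
# yields (Tunable, CovariantTunableGroup) pairs, as used above, so the fixture
# returns the group that owns the first tunable. The ".name" attribute is
# assumed here to be a public property of CovariantTunableGroup.
#
# def test_covariant_group_name(covariant_group: CovariantTunableGroup) -> None:
#     assert covariant_group.name in {"provision", "boot", "kernel"}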


@pytest.fixture
def mixed_numerics_tunable_groups() -> TunableGroups:
    """
    A test fixture with mixed numeric tunable groups to test type conversions.

    Returns
    -------
    tunable_groups : TunableGroups
        A new TunableGroups object for testing.
    """
    tunables = TunableGroups(
        {
            "mix-numerics": {
                "cost": 1,
                "params": {
                    "int": {
                        "description": "An integer",
                        "type": "int",
                        "default": 0,
                        "range": [0, 100],
                    },
                    "float": {
                        "description": "A float",
                        "type": "float",
                        "default": 0,
                        "range": [0, 1],
                    },
                },
            },
        }
    )
    tunables.reset()
    return tunables
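
# Minimal standalone sketch: build the mock TunableGroups straight from the
# JSON above and dump its tunables grouped by covariant group. This runs only
# under direct execution, so pytest collection is unaffected. It relies on
# iteration yielding (Tunable, CovariantTunableGroup) pairs, as in the
# covariant_group fixture, and assumes CovariantTunableGroup exposes ".name".
if __name__ == "__main__":
    _demo_tunables = TunableGroups(json.loads(TUNABLE_GROUPS_JSON))
    _demo_tunables.reset()
    for (_tunable, _group) in _demo_tunables:
        print(_group.name, _tunable)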