Coverage report for mlos_bench/mlos_bench/tests/environments/local/local_env_test.py: 100% of 37 statements covered.

Generated by coverage.py v7.11.0 at 2025-10-30 00:51 +0000.

1# 

2# Copyright (c) Microsoft Corporation. 

3# Licensed under the MIT License. 

4# 

5"""Unit tests for LocalEnv benchmark environment.""" 

6import pytest 

7 

8from mlos_bench.environments.status import Status 

9from mlos_bench.tests.environments import check_env_success 

10from mlos_bench.tests.environments.local import create_local_env 

11from mlos_bench.tunables.tunable_groups import TunableGroups 

12 

13 

def test_local_env(tunable_groups: TunableGroups) -> None:
    """Produce benchmark and telemetry data in a local script and read it."""
    # Script emits a long-format (metric,value) CSV for the env to read back.
    config = {
        "run": [
            "echo 'metric,value' > output.csv",
            "echo 'latency,10' >> output.csv",
            "echo 'throughput,66' >> output.csv",
            "echo 'score,0.9' >> output.csv",
        ],
        "read_results_file": "output.csv",
    }
    local_env = create_local_env(tunable_groups, config)

    # Values parsed from the CSV should come back as floats.
    expected = {
        "latency": 10.0,
        "throughput": 66.0,
        "score": 0.9,
    }
    check_env_success(
        local_env,
        tunable_groups,
        expected_results=expected,
        expected_telemetry=[],
    )

39 

40 

def test_local_env_service_context(tunable_groups: TunableGroups) -> None:
    """Basic check that context support for Service mixins are handled when environment
    contexts are entered.
    """
    local_env = create_local_env(tunable_groups, {"run": ["echo NA"]})
    # pylint: disable=protected-access
    service = local_env._service
    # Before entering the environment context, no service contexts exist.
    assert service
    assert not service._in_context
    assert not service._service_contexts
    with local_env as env_context:
        # Entering the env context must also enter the Service mixin contexts.
        assert env_context._in_context
        assert service._in_context
        assert service._service_contexts  # type: ignore[unreachable] # (false positive)
        assert all(ctx._in_context for ctx in service._service_contexts)
        assert all(ctx._in_context for ctx in service._services)
    # Exiting the env context should tear the Service contexts back down.
    assert not service._in_context  # type: ignore[unreachable] # (false positive)
    assert not service._service_contexts
    assert not any(ctx._in_context for ctx in service._services)

59 

60 

def test_local_env_results_no_header(tunable_groups: TunableGroups) -> None:
    """Fail if the results are not in the expected format."""
    # Results CSV is deliberately missing the 'metric,value' header row.
    config = {
        "run": [
            # No header
            "echo 'latency,10' > output.csv",
            "echo 'throughput,66' >> output.csv",
            "echo 'score,0.9' >> output.csv",
        ],
        "read_results_file": "output.csv",
    }
    local_env = create_local_env(tunable_groups, config)

    with local_env as context:
        assert context.setup(tunable_groups)
        # Parsing a headerless results file should raise.
        with pytest.raises(ValueError):
            context.run()

80 

81 

def test_local_env_wide(tunable_groups: TunableGroups) -> None:
    """Produce benchmark data in wide format and read it."""
    # Wide format: one header row of metric names, one row of values.
    config = {
        "run": [
            "echo 'latency,throughput,score' > output.csv",
            "echo '10,66,0.9' >> output.csv",
        ],
        "read_results_file": "output.csv",
    }
    local_env = create_local_env(tunable_groups, config)

    expected = {
        "latency": 10,
        "throughput": 66,
        "score": 0.9,
    }
    check_env_success(
        local_env,
        tunable_groups,
        expected_results=expected,
        expected_telemetry=[],
    )

105 

106 

def test_local_env_results_null_file(tunable_groups: TunableGroups) -> None:
    """When the results file is of zero length, do not crash but mark the trial
    FAILED.
    """
    # Script writes an (effectively) empty results file.
    config = {
        "run": [
            "echo '' > output.csv",
        ],
        "read_results_file": "output.csv",
    }
    local_env = create_local_env(tunable_groups, config)

    # Expect a FAILED status with no results rather than an exception.
    check_env_success(
        local_env,
        tunable_groups,
        expected_status_run={Status.FAILED},
        expected_results=None,
        expected_telemetry=[],
    )

128 

129 

def test_local_env_results_empty_file(tunable_groups: TunableGroups) -> None:
    """When the results file has no data, do not crash but mark the trial FAILED."""
    # Results CSV has a header but zero data rows.
    config = {
        "run": [
            "echo 'latency,throughput,score' > output.csv",
        ],
        "read_results_file": "output.csv",
    }
    local_env = create_local_env(tunable_groups, config)

    # Expect a FAILED status with no results rather than an exception.
    check_env_success(
        local_env,
        tunable_groups,
        expected_status_run={Status.FAILED},
        expected_results=None,
        expected_telemetry=[],
    )