Coverage for mlos_bench/mlos_bench/tests/launcher_run_test.py: 97%

35 statements  

« prev     ^ index     » next       coverage.py v7.6.7, created at 2024-11-22 01:18 +0000

1# 

2# Copyright (c) Microsoft Corporation. 

3# Licensed under the MIT License. 

4# 

5"""Unit tests to check the main CLI launcher.""" 

6import os 

7import re 

8from typing import List 

9 

10import pytest 

11 

12from mlos_bench.services.config_persistence import ConfigPersistenceService 

13from mlos_bench.services.local.local_exec import LocalExecService 

14from mlos_bench.util import path_join 

15 

16# pylint: disable=redefined-outer-name 

17 

18 

@pytest.fixture
def root_path() -> str:
    """Root path of mlos_bench project."""
    # Three levels up from this tests module lands at the repository root.
    tests_dir = os.path.dirname(__file__)
    return path_join(tests_dir, "../../..", abs_path=True)

23 

24 

@pytest.fixture
def local_exec_service() -> LocalExecService:
    """Test fixture for LocalExecService."""
    # Search both the shipped configs and the examples when resolving paths.
    config_paths = [
        "mlos_bench/config",
        "mlos_bench/examples",
    ]
    config_service = ConfigPersistenceService({"config_path": config_paths})
    return LocalExecService(parent=config_service)

38 

39 

def _launch_main_app(
    root_path: str,
    local_exec_service: LocalExecService,
    cli_config: str,
    re_expected: List[str],
) -> None:
    """Run mlos_bench command-line application with given config and check the results
    in the log.
    """
    with local_exec_service.temp_dir_context() as temp_dir:

        # Test developers note: for local debugging,
        # uncomment the following line to use a known file path that can be examined:
        # temp_dir = '/tmp'
        log_path = path_join(temp_dir, "mock-test.log")
        cmd = (
            "./mlos_bench/mlos_bench/run.py"
            " --config_path ./mlos_bench/mlos_bench/tests/config/"
            f" {cli_config} --log_file '{log_path}'"
        )
        (return_code, _stdout, _stderr) = local_exec_service.local_exec([cmd], cwd=root_path)
        assert return_code == 0

        # Scan the log once, advancing to the next expected pattern each time
        # the current one matches a line: all patterns must appear, in order.
        # Patterns are consumed (and hence compiled) lazily, one at a time.
        remaining = iter(re_expected)
        expected = next(remaining, None)
        if expected is not None:
            with open(log_path, "rt", encoding="utf-8") as fh_out:
                for line in fh_out:
                    if re.match(expected, line):
                        expected = next(remaining, None)
                        if expected is None:
                            break  # Success: all patterns found.
        assert expected is None, f"Pattern not found: '{expected}'"

75 

76 

# Regex fragment matching the timestamp prefix of mlos_bench log records,
# e.g. "2024-11-22 01:18:00,123".
_RE_DATE = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}"

78 

79 

def test_launch_main_app_bench(root_path: str, local_exec_service: LocalExecService) -> None:
    """Run mlos_bench command-line application with mock benchmark config and default
    tunable values and check the results in the log.
    """
    _launch_main_app(
        root_path,
        local_exec_service,
        # No leading space here: _launch_main_app already inserts the separator,
        # matching the style of test_launch_main_app_opt.
        "--config cli/mock-bench.jsonc"
        " --trial_config_repeat_count 5"
        " --mock_env_seed -1",  # Deterministic Mock Environment.
        [
            # Final score of the deterministic mock benchmark run.
            f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 67\.40\d+\}\s*$",
        ],
    )

94 

95 

def test_launch_main_app_bench_values(
    root_path: str,
    local_exec_service: LocalExecService,
) -> None:
    """Run mlos_bench command-line application with mock benchmark config and user-
    specified tunable values and check the results in the log.
    """
    _launch_main_app(
        root_path,
        local_exec_service,
        # No leading space here: _launch_main_app already inserts the separator,
        # matching the style of test_launch_main_app_opt.
        "--config cli/mock-bench.jsonc"
        " --tunable_values tunable-values/tunable-values-example.jsonc"
        " --trial_config_repeat_count 5"
        " --mock_env_seed -1",  # Deterministic Mock Environment.
        [
            # Final score of the deterministic mock benchmark run with the given tunables.
            f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 67\.11\d+\}\s*$",
        ],
    )

114 

115 

def test_launch_main_app_opt(root_path: str, local_exec_service: LocalExecService) -> None:
    """Run mlos_bench command-line application with mock optimization config and check
    the results in the log.
    """
    # Common prefix of the optimizer's warm-up log records.
    warmup_prefix = (
        f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ "
        r"bulk_register DEBUG Warm-up END: .* :: "
    )
    expected_patterns = [
        # Iteration 1: Expect first value to be the baseline
        warmup_prefix + r"\{'score': 64\.53\d+\}$",
        # Iteration 2: The result may not always be deterministic
        warmup_prefix + r"\{'score': \d+\.\d+\}$",
        # Iteration 3: non-deterministic (depends on the optimizer)
        warmup_prefix + r"\{'score': \d+\.\d+\}$",
        # Final result: baseline is the optimum for the mock environment
        f"^{_RE_DATE} run\\.py:\\d+ " + r"_main INFO Final score: \{'score': 64\.53\d+\}\s*$",
    ]
    _launch_main_app(
        root_path,
        local_exec_service,
        "--config cli/mock-opt.jsonc"
        " --trial_config_repeat_count 3"
        " --max_suggestions 3"
        " --mock_env_seed 42",  # Noisy Mock Environment.
        expected_patterns,
    )