Coverage for mlos_bench/mlos_bench/tests/launcher_run_test.py: 97%
34 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-05 00:36 +0000
1#
2# Copyright (c) Microsoft Corporation.
3# Licensed under the MIT License.
4#
5"""
6Unit tests to check the main CLI launcher.
7"""
8import os
9import re
10from typing import List
12import pytest
14from mlos_bench.services.local.local_exec import LocalExecService
15from mlos_bench.services.config_persistence import ConfigPersistenceService
16from mlos_bench.util import path_join
18# pylint: disable=redefined-outer-name
@pytest.fixture
def root_path() -> str:
    """
    Absolute path to the root of the mlos_bench project tree.
    """
    tests_dir = os.path.dirname(__file__)
    return path_join(tests_dir, "../../..", abs_path=True)
@pytest.fixture
def local_exec_service() -> LocalExecService:
    """
    Test fixture that builds a LocalExecService backed by a
    ConfigPersistenceService with the standard config search paths.
    """
    config_paths = [
        "mlos_bench/config",
        "mlos_bench/examples",
    ]
    parent_service = ConfigPersistenceService({"config_path": config_paths})
    return LocalExecService(parent=parent_service)
def _launch_main_app(root_path: str, local_exec_service: LocalExecService,
                     cli_config: str, re_expected: List[str]) -> None:
    """
    Run the mlos_bench command-line application with the given CLI config
    and verify that each expected regex pattern appears, in order, in the log.
    """
    with local_exec_service.temp_dir_context() as temp_dir:
        # Test developers note: for local debugging,
        # uncomment the following line to use a known file path that can be examined:
        # temp_dir = '/tmp'
        log_path = path_join(temp_dir, "mock-test.log")
        cmd = f"./mlos_bench/mlos_bench/run.py {cli_config} --log_file '{log_path}'"
        (return_code, _stdout, _stderr) = local_exec_service.local_exec([cmd], cwd=root_path)
        assert return_code == 0
        try:
            patterns = iter(re_expected)
            re_log = re.compile(next(patterns))
            with open(log_path, "rt", encoding="utf-8") as fh_out:
                for line in fh_out:
                    if re_log.match(line):
                        # Current pattern matched: advance to the next one.
                        # StopIteration here means every pattern was found.
                        re_log = re.compile(next(patterns))
            # Log exhausted before all patterns matched.
            assert False, f"Pattern not found: '{re_log.pattern}'"
        except StopIteration:
            pass  # Success: all patterns found
71_RE_DATE = r"\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3}"
def test_launch_main_app_bench(root_path: str, local_exec_service: LocalExecService) -> None:
    """
    Run mlos_bench command-line application with mock benchmark config
    and check the results in the log.
    """
    expected_patterns = [
        # Final score line emitted by run.py for the mock benchmark.
        f"^{_RE_DATE} run\\.py:\\d+ " +
        r"_main INFO Final score: 65\.67\d+\s*$",
    ]
    _launch_main_app(
        root_path, local_exec_service,
        "--config mlos_bench/mlos_bench/tests/config/cli/mock-bench.jsonc",
        expected_patterns,
    )
def test_launch_main_app_opt(root_path: str, local_exec_service: LocalExecService) -> None:
    """
    Run mlos_bench command-line application with mock optimization config
    and check the results in the log.
    """
    expected_patterns = [
        # Iteration 1: Expect first value to be the baseline
        f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ " +
        r"bulk_register DEBUG Warm-up end: score = 64\.53\d+$",
        # Iteration 2: The result may not always be deterministic
        f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ " +
        r"bulk_register DEBUG Warm-up end: score = \d+\.\d+$",
        # Iteration 3: non-deterministic (depends on the optimizer)
        f"^{_RE_DATE} mlos_core_optimizer\\.py:\\d+ " +
        r"bulk_register DEBUG Warm-up end: score = \d+\.\d+$",
        # Final result: baseline is the optimum for the mock environment
        f"^{_RE_DATE} run\\.py:\\d+ " +
        r"_main INFO Final score: 64\.53\d+\s*$",
    ]
    cli_args = ("--config mlos_bench/mlos_bench/tests/config/cli/mock-opt.jsonc"
                " --trial_config_repeat_count 3 --max_suggestions 3")
    _launch_main_app(root_path, local_exec_service, cli_args, expected_patterns)