Coverage for mlos_bench/mlos_bench/tests/environments/local/local_env_test.py: 100% (30 statements)
coverage.py v7.6.9, created at 2024-12-20 00:44 +0000
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""Unit tests for LocalEnv benchmark environment."""
import pytest

from mlos_bench.tests.environments import check_env_success
from mlos_bench.tests.environments.local import create_local_env
from mlos_bench.tunables.tunable_groups import TunableGroups
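
# Note: `tunable_groups` below is a pytest fixture (presumably provided by the
# shared conftest for the mlos_bench tests) that supplies a small covering set
# of tunable parameters for the environment under test.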


def test_local_env(tunable_groups: TunableGroups) -> None:
    """Produce benchmark and telemetry data in a local script and read it."""
    local_env = create_local_env(
        tunable_groups,
        {
            "run": [
                "echo 'metric,value' > output.csv",
                "echo 'latency,10' >> output.csv",
                "echo 'throughput,66' >> output.csv",
                "echo 'score,0.9' >> output.csv",
            ],
            "read_results_file": "output.csv",
        },
    )
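
    # check_env_success() is expected to enter the environment context, call
    # setup() and run(), and compare the returned results and telemetry against
    # the expected values (see mlos_bench.tests.environments for its exact behavior).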
    check_env_success(
        local_env,
        tunable_groups,
        expected_results={
            "latency": 10.0,
            "throughput": 66.0,
            "score": 0.9,
        },
        expected_telemetry=[],
    )


def test_local_env_service_context(tunable_groups: TunableGroups) -> None:
    """Basic check that context support for Service mixins is handled when environment
    contexts are entered.
    """
    local_env = create_local_env(tunable_groups, {"run": ["echo NA"]})
    # pylint: disable=protected-access
    assert local_env._service
    assert not local_env._service._in_context
    assert not local_env._service._service_contexts
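    # Entering the environment context should also enter the context of its
    # composite Service and, transitively, of each child Service it wraps;
    # exiting should unwind them all again, as asserted below.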
    with local_env as env_context:
        assert env_context._in_context
        assert local_env._service._in_context
        assert local_env._service._service_contexts  # type: ignore[unreachable] # (false positive)
        assert all(svc._in_context for svc in local_env._service._service_contexts)
        assert all(svc._in_context for svc in local_env._service._services)
    assert not local_env._service._in_context  # type: ignore[unreachable] # (false positive)
    assert not local_env._service._service_contexts
    assert not any(svc._in_context for svc in local_env._service._services)


def test_local_env_results_no_header(tunable_groups: TunableGroups) -> None:
    """Fail if the results are not in the expected format."""
    local_env = create_local_env(
        tunable_groups,
        {
            "run": [
                # No header
                "echo 'latency,10' > output.csv",
                "echo 'throughput,66' >> output.csv",
                "echo 'score,0.9' >> output.csv",
            ],
            "read_results_file": "output.csv",
        },
    )
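
    # Without a 'metric,value' header row the results reader cannot map the CSV
    # columns to metrics, so run() is expected to raise a ValueError.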
    with local_env as env_context:
        assert env_context.setup(tunable_groups)
        with pytest.raises(ValueError):
            env_context.run()


def test_local_env_wide(tunable_groups: TunableGroups) -> None:
    """Produce benchmark data in wide format and read it."""
    local_env = create_local_env(
        tunable_groups,
        {
            "run": [
                "echo 'latency,throughput,score' > output.csv",
                "echo '10,66,0.9' >> output.csv",
            ],
            "read_results_file": "output.csv",
        },
    )
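
    # Here the script emits results in "wide" form (one header row of metric
    # names and one row of values) rather than the long 'metric,value' form used
    # above; both layouts should produce the same results dictionary.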
    check_env_success(
        local_env,
        tunable_groups,
        expected_results={
            "latency": 10,
            "throughput": 66,
            "score": 0.9,
        },
        expected_telemetry=[],
    )