Coverage for mlos_bench/mlos_bench/tests/optimizers/mock_opt_test.py: 100% (37 statements)
coverage.py v7.5.1, created at 2024-05-06 00:35 +0000

#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
"""
Unit tests for mock mlos_bench optimizer.
"""

import pytest

from mlos_bench.environments.status import Status
from mlos_bench.optimizers.mock_optimizer import MockOptimizer

# pylint: disable=redefined-outer-name


@pytest.fixture
def mock_configurations_no_defaults() -> list:
    """
    A list of 2-tuples of (tunable_values, score) to test the optimizers.
    """
    return [
        ({
            "vmSize": "Standard_B4ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": 13112,
            "kernel_sched_latency_ns": 796233790,
        }, 88.88),
        ({
            "vmSize": "Standard_B2ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": 117026,
            "kernel_sched_latency_ns": 149827706,
        }, 66.66),
        ({
            "vmSize": "Standard_B4ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": 354785,
            "kernel_sched_latency_ns": 795285932,
        }, 99.99),
    ]


@pytest.fixture
def mock_configurations(mock_configurations_no_defaults: list) -> list:
    """
    A list of 2-tuples of (tunable_values, score) to test the optimizers.
    """
    return [
        ({
            "vmSize": "Standard_B4ms",
            "idle": "halt",
            "kernel_sched_migration_cost_ns": -1,
            "kernel_sched_latency_ns": 2000000,
        }, 88.88),
    ] + mock_configurations_no_defaults


def _optimize(mock_opt: MockOptimizer, mock_configurations: list) -> float:
    """
    Run several iterations of the optimizer and return the best score.
    """
    for (tunable_values, score) in mock_configurations:
        assert mock_opt.not_converged()
        tunables = mock_opt.suggest()
        assert tunables.get_param_values() == tunable_values
        mock_opt.register(tunables, Status.SUCCEEDED, score)

    (score, _tunables) = mock_opt.get_best_observation()
    assert score is not None
    assert isinstance(score, float)
    return score


def test_mock_optimizer(mock_opt: MockOptimizer, mock_configurations: list) -> None:
    """
    Make sure that mock optimizer produces consistent suggestions.
    """
    score = _optimize(mock_opt, mock_configurations)
    assert score == pytest.approx(66.66, 0.01)


def test_mock_optimizer_no_defaults(mock_opt_no_defaults: MockOptimizer,
                                    mock_configurations_no_defaults: list) -> None:
    """
    Make sure that mock optimizer produces consistent suggestions.
    """
    score = _optimize(mock_opt_no_defaults, mock_configurations_no_defaults)
    assert score == pytest.approx(66.66, 0.01)


def test_mock_optimizer_max(mock_opt_max: MockOptimizer, mock_configurations: list) -> None:
    """
    Check the maximization mode of the mock optimizer.
    """
    score = _optimize(mock_opt_max, mock_configurations)
    assert score == pytest.approx(99.99, 0.01)


def test_mock_optimizer_register_fail(mock_opt: MockOptimizer) -> None:
    """
    Check the input acceptance conditions for Optimizer.register().
    """
    tunables = mock_opt.suggest()
    mock_opt.register(tunables, Status.SUCCEEDED, 10)
    mock_opt.register(tunables, Status.FAILED)
    with pytest.raises(ValueError):
        mock_opt.register(tunables, Status.SUCCEEDED, None)
    with pytest.raises(ValueError):
        mock_opt.register(tunables, Status.FAILED, 10)
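
Note: the mock_opt, mock_opt_no_defaults, and mock_opt_max fixtures these tests consume are not defined in this file; they are provided elsewhere in the optimizer test package (typically its conftest.py), which this coverage page does not show. The sketch below is a minimal, hypothetical stand-in, not mlos_bench's MockOptimizer, that implements only the surface _optimize() and test_mock_optimizer_register_fail() exercise: not_converged(), suggest(), register(), and get_best_observation(). Every class and attribute name in it is invented for illustration; the real fixtures construct a MockOptimizer, and the exact status/score consistency check is an assumption inferred from the tests above.

# Hypothetical stand-in for illustration only -- NOT mlos_bench's MockOptimizer.
# It mimics the call sequence _optimize() drives: not_converged() -> suggest()
# -> register() -> get_best_observation(), minimizing the registered score.
from typing import Optional, Tuple

from mlos_bench.environments.status import Status


class FakeTunables:
    """Hypothetical stand-in for a tunables object exposing get_param_values()."""

    def __init__(self, values: dict):
        self._values = dict(values)

    def get_param_values(self) -> dict:
        return dict(self._values)


class FakeOptimizer:
    """Replays a fixed list of configurations and tracks the best (lowest) score."""

    def __init__(self, configurations: list):
        self._pending = list(configurations)
        self._best: Optional[Tuple[float, FakeTunables]] = None

    def not_converged(self) -> bool:
        # Converged once every queued configuration has been suggested.
        return bool(self._pending)

    def suggest(self) -> FakeTunables:
        return FakeTunables(self._pending.pop(0))

    def register(self, tunables: FakeTunables, status: Status,
                 score: Optional[float] = None) -> None:
        # Assumed acceptance rule, mirroring test_mock_optimizer_register_fail():
        # a score must accompany SUCCEEDED and must be absent otherwise.
        if (status == Status.SUCCEEDED) == (score is None):
            raise ValueError(f"Inconsistent status and score: {status} / {score}")
        if status == Status.SUCCEEDED and (self._best is None or score < self._best[0]):
            self._best = (float(score), tunables)

    def get_best_observation(self) -> Tuple[Optional[float], Optional[FakeTunables]]:
        return self._best if self._best is not None else (None, None)


# Example: the same flow the tests drive, reduced to one configuration.
fake_opt = FakeOptimizer([{"vmSize": "Standard_B2ms", "idle": "halt"}])
assert fake_opt.not_converged()
suggested = fake_opt.suggest()
fake_opt.register(suggested, Status.SUCCEEDED, 66.66)
(best_score, _best_tunables) = fake_opt.get_best_observation()
assert best_score == 66.66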