Coverage for mlos_bench/mlos_bench/tunables/tunable.py: 96%

288 statements  

coverage.py v7.8.0, created at 2025-04-01 00:52 +0000

1# 

2# Copyright (c) Microsoft Corporation. 

3# Licensed under the MIT License. 

4# 

5""" 

6Definitions for :py:class:`~.Tunable` parameters. 

7 

8Tunable parameters are one of the core building blocks of the :py:mod:`mlos_bench` 

9framework. 

10Together with :py:class:`~mlos_bench.tunables.tunable_groups.TunableGroups`, they 

11provide a description of a configuration parameter space for a benchmark or an 

12autotuning optimization task. 

13 

14Some details about the configuration of an individual :py:class:`~.Tunable` 

15parameter are available in the Examples docstrings below. 

16 

17However, Tunables are generally provided as a part of a 

18:py:class:`~mlos_bench.tunables.tunable_groups.TunableGroups` config specified in a 

19JSON config file. 
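For example, a single Tunable parameter might be declared inside a
:py:class:`~mlos_bench.tunables.tunable_groups.TunableGroups` JSON config
roughly as follows (an illustrative sketch; the group name, parameter name,
and values are arbitrary)::

    {
        "group_name": {
            "cost": 1,
            "params": {
                "example_param": {
                    "type": "int",
                    "default": 1,
                    "range": [0, 10]
                }
            }
        }
    }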

20 

21See Also 

22-------- 

23:py:mod:`mlos_bench.tunables` : 

24 For more information on Tunable parameters and their configuration. 

25""" 

26# pylint: disable=too-many-lines # lots of docstring examples 

27 

28import copy 

29import logging 

30from collections.abc import Iterable 

31from typing import Any 

32 

33import json5 as json 

34import numpy as np 

35 

36from mlos_bench.config.schemas import ConfigSchema 

37from mlos_bench.tunables.tunable_types import ( 

38 TUNABLE_DTYPE, 

39 DistributionName, 

40 TunableValue, 

41 TunableValueType, 

42 TunableValueTypeName, 

43 tunable_dict_from_dict, 

44) 

45from mlos_bench.util import nullable 

46 

47_LOG = logging.getLogger(__name__) 

48 

49 

50class Tunable: # pylint: disable=too-many-instance-attributes,too-many-public-methods 

51 """A Tunable parameter definition and its current value.""" 

52 

53 @staticmethod 

54 def from_json(name: str, json_str: str) -> "Tunable": 

55 """ 

56 Create a Tunable object from a JSON string. 

57 

58 Parameters 

59 ---------- 

60 name : str 

61 Human-readable identifier of the Tunable parameter. 

62 json_str : str 

63 JSON string that represents a Tunable. 

64 

65 Returns 

66 ------- 

67 tunable : Tunable 

68 A new Tunable object created from the JSON string. 

69 

70 Notes 

71 ----- 

72 This is mostly for testing purposes. 

73 Generally Tunables will be created as a part of loading 

74 :py:class:`~mlos_bench.tunables.tunable_groups.TunableGroups`. 

75 

76 See Also 

77 -------- 

78 :py:meth:`ConfigPersistenceService.load_tunables <mlos_bench.services.config_persistence.ConfigPersistenceService.load_tunables>` 
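
        Examples
        --------
        A minimal illustrative example (the Tunable name and config values below
        are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("example_tunable", json_config)
        >>> tunable.default
        1
        >>> tunable.value
        1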

79 """ # pylint: disable=line-too-long # noqa: E501 

80 config = json.loads(json_str) 

81 assert isinstance(config, dict) 

82 Tunable._validate_json_config(name, config) 

83 return Tunable(name, config) 

84 

85 @staticmethod 

86 def _validate_json_config(name: str, config: dict) -> None: 

87 """ 

88 Reconstructs a basic JSON config that this Tunable might have been

89 constructed with (via a TunableGroups config) and validates it against the

90 schema, so that we know our test cases are valid.

91 

92 Notes 

93 ----- 

94 This is mostly for testing purposes, so we don't call it during normal 

95 Tunable instantiation since it's typically already been done by 

96 TunableGroups. 

97 """ 

98 json_config = { 

99 "group": { 

100 "cost": 1, 

101 "params": {name: config}, 

102 } 

103 } 

104 ConfigSchema.TUNABLE_PARAMS.validate(json_config) 

105 

106 def __init__(self, name: str, config: dict): 

107 """ 

108 Create an instance of a new Tunable parameter. 

109 

110 Parameters 

111 ---------- 

112 name : str 

113 Human-readable identifier of the Tunable parameter. 

114 NOTE: ``!`` characters are currently disallowed in Tunable names in order

115 to handle "special" values sampling logic.

116 See: :py:mod:`mlos_bench.optimizers.convert_configspace` for details. 

117 config : dict 

118 Python dict that represents a Tunable (e.g., deserialized from JSON) 

119 NOTE: Must be convertible to a 

120 :py:class:`~mlos_bench.tunables.tunable_types.TunableDict`. 

121 

122 See Also 

123 -------- 

124 :py:mod:`mlos_bench.tunables` : 

125 For more information on Tunable parameters and their configuration. 

126 """ 

127 t_config = tunable_dict_from_dict(config) 

128 if not isinstance(name, str) or "!" in name: # TODO: Use a regex here and in JSON schema 

129 raise ValueError(f"Invalid name of the tunable: {name}") 

130 self._name = name 

131 self._type: TunableValueTypeName = t_config["type"] # required 

132 if self._type not in TUNABLE_DTYPE: 

133 raise ValueError(f"Invalid parameter type: {self._type}") 

134 self._description = t_config.get("description") 

135 self._default = t_config["default"] 

136 self._default = self.dtype(self._default) if self._default is not None else self._default 

137 self._values = t_config.get("values") 

138 if self._values: 

139 self._values = [str(v) if v is not None else v for v in self._values] 

140 self._meta: dict[str, Any] = t_config.get("meta", {}) 

141 self._range: tuple[int, int] | tuple[float, float] | None = None 

142 self._quantization_bins: int | None = t_config.get("quantization_bins") 

143 self._log: bool | None = t_config.get("log") 

144 self._distribution: DistributionName | None = None 

145 self._distribution_params: dict[str, float] = {} 

146 distr = t_config.get("distribution") 

147 if distr: 

148 self._distribution = distr["type"] # required 

149 self._distribution_params = distr.get("params") or {} 

150 config_range = config.get("range") 

151 if config_range is not None: 

152 assert len(config_range) == 2, f"Invalid range: {config_range}" 

153 config_range = (config_range[0], config_range[1]) 

154 self._range = config_range 

155 self._special: list[int] | list[float] = t_config.get("special") or [] 

156 self._weights: list[float] = ( 

157 t_config.get("values_weights") or t_config.get("special_weights") or [] 

158 ) 

159 self._range_weight: float | None = t_config.get("range_weight") 

160 self._current_value = None 

161 self._sanity_check() 

162 self.value = self._default 

163 

164 def _sanity_check(self) -> None: 

165 """Check if the status of the Tunable is valid, and throw ValueError if it is 

166 not. 

167 """ 

168 if self.is_categorical: 

169 self._sanity_check_categorical() 

170 elif self.is_numerical: 

171 self._sanity_check_numerical() 

172 else: 

173 raise ValueError(f"Invalid parameter type for tunable {self}: {self._type}") 

174 if not self.is_valid(self.default): 

175 raise ValueError(f"Invalid default value for tunable {self}: {self.default}") 

176 

177 def _sanity_check_categorical(self) -> None: 

178 """Check if the status of the categorical Tunable is valid, and throw ValueError 

179 if it is not. 

180 """ 

181 # pylint: disable=too-complex 

182 assert self.is_categorical 

183 if not (self._values and isinstance(self._values, Iterable)): 

184 raise ValueError(f"Must specify values for the categorical type tunable {self}") 

185 if self._range is not None: 

186 raise ValueError(f"Range must be None for the categorical type tunable {self}") 

187 if len(set(self._values)) != len(self._values): 

188 raise ValueError(f"Values must be unique for the categorical type tunable {self}") 

189 if self._special: 

190 raise ValueError(f"Categorical tunable cannot have special values: {self}") 

191 if self._range_weight is not None: 

192 raise ValueError(f"Categorical tunable cannot have range_weight: {self}") 

193 if self._log is not None: 

194 raise ValueError(f"Categorical tunable cannot have log parameter: {self}") 

195 if self._quantization_bins is not None: 

196 raise ValueError(f"Categorical tunable cannot have quantization parameter: {self}") 

197 if self._distribution is not None: 

198 raise ValueError(f"Categorical parameters do not support `distribution`: {self}") 

199 if self._weights: 

200 if len(self._weights) != len(self._values): 

201 raise ValueError(f"Must specify weights for all values: {self}") 

202 if any(w < 0 for w in self._weights): 

203 raise ValueError(f"All weights must be non-negative: {self}") 

204 

205 def _sanity_check_numerical(self) -> None: 

206 """Check if the status of the numerical Tunable is valid, and throw ValueError 

207 if it is not. 

208 """ 

209 # pylint: disable=too-complex,too-many-branches 

210 assert self.is_numerical 

211 if self._values is not None: 

212 raise ValueError(f"Values must be None for the numerical type tunable {self}") 

213 if not self._range or len(self._range) != 2 or self._range[0] >= self._range[1]: 

214 raise ValueError(f"Invalid range for tunable {self}: {self._range}") 

215 if self._quantization_bins is not None and self._quantization_bins <= 1: 

216 raise ValueError(f"Number of quantization bins is <= 1: {self}") 

217 if self._distribution is not None and self._distribution not in { 

218 "uniform", 

219 "normal", 

220 "beta", 

221 }: 

222 raise ValueError(f"Invalid distribution: {self}") 

223 if self._distribution_params and self._distribution is None: 

224 raise ValueError(f"Must specify the distribution: {self}") 

225 if self._weights: 

226 if self._range_weight is None: 

227 raise ValueError(f"Must specify weight for the range: {self}") 

228 if len(self._weights) != len(self._special): 

229 raise ValueError(f"Must specify weights for all special values: {self}")

230 if any(w < 0 for w in self._weights + [self._range_weight]): 

231 raise ValueError(f"All weights must be non-negative: {self}") 

232 elif self._range_weight is not None: 

233 raise ValueError(f"Must specify both weights and range_weight or none: {self}") 

234 

235 def __repr__(self) -> str: 

236 """ 

237 Produce a human-readable version of the Tunable (mostly for logging). 

238 

239 Returns 

240 ------- 

241 string : str 

242 A human-readable version of the Tunable. 

243 """ 

244 # TODO? Add weights, specials, quantization, distribution? 

245 if self.is_categorical: 

246 return ( 

247 f"{self._name}[{self._type}]({self._values}:{self._default})={self._current_value}" 

248 ) 

249 return f"{self._name}[{self._type}]({self._range}:{self._default})={self._current_value}" 

250 

251 def __eq__(self, other: object) -> bool: 

252 """ 

253 Check if two Tunable objects are equal. 

254 

255 Parameters 

256 ---------- 

257 other : Tunable 

258 A tunable object to compare to. 

259 

260 Returns 

261 ------- 

262 is_equal : bool 

263 True if the Tunables correspond to the same parameter and have the same value and type. 

264 NOTE: ranges and special values are not currently considered in the comparison. 
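
        Examples
        --------
        An illustrative example using a copy of a Tunable (the config values are
        arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable1 = Tunable.from_json("example_tunable", json_config)
        >>> tunable2 = tunable1.copy()
        >>> tunable1 == tunable2
        True
        >>> tunable2.value = 2
        >>> tunable1 == tunable2
        False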

265 """ 

266 if not isinstance(other, Tunable): 

267 return False 

268 return bool( 

269 self._name == other._name 

270 and self._type == other._type 

271 and self._current_value == other._current_value 

272 ) 

273 

274 def __lt__(self, other: object) -> bool: # pylint: disable=too-many-return-statements 

275 """ 

276 Compare the two Tunable objects. 

277 

278 We mostly need this to create a canonical list of Tunable objects when 

279 hashing a :py:class:`~mlos_bench.tunables.tunable_groups.TunableGroups`. 

280 

281 Parameters 

282 ---------- 

283 other : Tunable 

284 A tunable object to compare to. 

285 

286 Returns 

287 ------- 

288 is_less : bool 

289 True if the current Tunable is less than the other one, False otherwise.

290 """ 

291 if not isinstance(other, Tunable): 

292 return False 

293 if self._name < other._name: 

294 return True 

295 if self._name == other._name and self._type < other._type: 

296 return True 

297 if self._name == other._name and self._type == other._type: 

298 if self.is_numerical: 

299 assert self._current_value is not None 

300 assert other._current_value is not None 

301 return bool(float(self._current_value) < float(other._current_value)) 

302 # else: categorical 

303 if self._current_value is None: 

304 return True 

305 if other._current_value is None: 

306 return False 

307 return bool(str(self._current_value) < str(other._current_value)) 

308 return False 

309 

310 def copy(self) -> "Tunable": 

311 """ 

312 Deep copy of the Tunable object. 

313 

314 Returns 

315 ------- 

316 tunable : Tunable 

317 A new Tunable object that is a deep copy of the original one. 

318 """ 

319 return copy.deepcopy(self) 

320 

321 @property 

322 def description(self) -> str | None: 

323 """Get the description of the Tunable.""" 

324 return self._description 

325 

326 @property 

327 def default(self) -> TunableValue: 

328 """Get the default value of the Tunable.""" 

329 return self._default 

330 

331 def is_default(self) -> bool: 

332 """Checks whether the currently assigned value of the Tunable is at its 

333 default. 
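
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("example_tunable", json_config)
        >>> tunable.is_default()
        True
        >>> tunable.value = 5
        >>> tunable.is_default()
        False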

334 """ 

335 return self._default == self._current_value 

336 

337 @property 

338 def value(self) -> TunableValue: 

339 """Get the current value of the Tunable.""" 

340 return self._current_value 

341 

342 @value.setter 

343 def value(self, value: TunableValue) -> TunableValue: 

344 """Set the current value of the Tunable.""" 

345 # We need this coercion for the values produced by some optimizers 

346 # (e.g., scikit-optimize) and for data restored from certain storage 

347 # systems (where values can be strings). 

348 try: 

349 if self.is_categorical and value is None: 

350 coerced_value = None 

351 else: 

352 assert value is not None 

353 coerced_value = self.dtype(value) 

354 except Exception: 

355 _LOG.error( 

356 "Impossible conversion: %s %s <- %s %s", 

357 self._type, 

358 self._name, 

359 type(value), 

360 value, 

361 ) 

362 raise 

363 
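        # Reject float values assigned to an "int" Tunable when the int() coercion
        # would lose precision instead of silently rounding them.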

364 if self._type == "int" and isinstance(value, float) and value != coerced_value: 

365 _LOG.error( 

366 "Loss of precision: %s %s <- %s %s", 

367 self._type, 

368 self._name, 

369 type(value), 

370 value, 

371 ) 

372 raise ValueError(f"Loss of precision: {self._name}={value}") 

373 

374 if not self.is_valid(coerced_value): 

375 _LOG.error( 

376 "Invalid assignment: %s %s <- %s %s", 

377 self._type, 

378 self._name, 

379 type(value), 

380 value, 

381 ) 

382 raise ValueError(f"Invalid value for the Tunable: {self._name}={value}") 

383 

384 self._current_value = coerced_value 

385 return self._current_value 

386 

387 def update(self, value: TunableValue) -> bool: 

388 """ 

389 Assign the value to the Tunable. Return True if it is a new value, False 

390 otherwise. 

391 

392 Parameters 

393 ---------- 

394 value : int | float | str 

395 Value to assign. 

396 

397 Returns 

398 ------- 

399 is_updated : bool 

400 True if the new value is different from the previous one, False otherwise. 
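
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("example_tunable", json_config)
        >>> tunable.update(2)
        True
        >>> tunable.update(2)
        False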

401 """ 

402 prev_value = self._current_value 

403 self.value = value 

404 return prev_value != self._current_value 

405 

406 def is_valid(self, value: TunableValue) -> bool: 

407 """ 

408 Check if the value can be assigned to the Tunable. 

409 

410 Parameters 

411 ---------- 

412 value : int | float | str 

413 Value to validate. 

414 

415 Returns 

416 ------- 

417 is_valid : bool 

418 True if the value is valid, False otherwise. 
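
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("example_tunable", json_config)
        >>> tunable.is_valid(5)
        True
        >>> tunable.is_valid(11)
        False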

419 """ 

420 if self.is_categorical and self._values: 

421 return value in self._values 

422 elif self.is_numerical and self._range: 

423 if isinstance(value, (int, float)): 

424 return self.in_range(value) or value in self._special 

425 else: 

426 raise ValueError(f"Invalid value type for Tunable {self}: {value}={type(value)}") 

427 else: 

428 raise ValueError(f"Invalid parameter type: {self._type}") 

429 

430 def in_range(self, value: int | float | str | None) -> bool: 

431 """ 

432 Check if the value is within the range of the Tunable. 

433 

434 Do *NOT* check for special values. Return False if the Tunable or value is 

435 categorical or None. 
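
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 1,
        ...     "range": [0, 10],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("example_tunable", json_config)
        >>> tunable.in_range(5)
        True
        >>> tunable.in_range(-1)
        False
        >>> tunable.in_range("foo")
        False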

436 """ 

437 return ( 

438 isinstance(value, (float, int)) 

439 and self.is_numerical 

440 and self._range is not None 

441 and bool(self._range[0] <= value <= self._range[1]) 

442 ) 

443 

444 @property 

445 def category(self) -> str | None: 

446 """Get the current value of the Tunable as a string.""" 

447 if self.is_categorical: 

448 return nullable(str, self._current_value) 

449 else: 

450 raise ValueError("Cannot get categorical values for a numerical Tunable.") 

451 

452 @category.setter 

453 def category(self, new_value: str | None) -> str | None: 

454 """Set the current value of the Tunable.""" 

455 assert self.is_categorical 

456 assert isinstance(new_value, (str, type(None))) 

457 self.value = new_value 

458 return self.value 

459 

460 @property 

461 def numerical_value(self) -> int | float: 

462 """Get the current value of the Tunable as a number.""" 

463 assert self._current_value is not None 

464 if self._type == "int": 

465 return int(self._current_value) 

466 elif self._type == "float": 

467 return float(self._current_value) 

468 else: 

469 raise ValueError("Cannot get numerical value for a categorical Tunable.") 

470 

471 @numerical_value.setter 

472 def numerical_value(self, new_value: int | float) -> int | float: 

473 """Set the current numerical value of the Tunable.""" 

474 # We need this coercion for the values produced by some optimizers 

475 # (e.g., scikit-optimize) and for data restored from certain storage 

476 # systems (where values can be strings). 

477 assert self.is_numerical 

478 self.value = new_value 

479 return self.value 

480 

481 @property 

482 def name(self) -> str: 

483 """Get the name / string ID of the Tunable.""" 

484 return self._name 

485 

486 @property 

487 def special(self) -> list[int] | list[float]: 

488 """ 

489 Get the special values of the Tunable. Return an empty list if there are none. 

490 

491 Special values are used to mark some values as "special" that need more 

492 explicit testing. For example, these might indicate "automatic" or 

493 "disabled" behavior for the system being tested instead of an explicit size 

494 and hence need more explicit sampling. 

495 

496 Notes 

497 ----- 

498 Only numerical Tunable parameters can have special values. 

499 

500 Returns 

501 ------- 

502 special : [int] | [float] 

503 A list of special values of the Tunable. Can be empty. 

504 

505 Examples 

506 -------- 

507 >>> # Example values of the special values 

508 >>> json_config = ''' 

509 ... { 

510 ... "type": "int", 

511 ... "default": 50, 

512 ... "range": [1, 100], 

513 ... // These are special and sampled 

514 ... // Note that the types don't need to match or be in the range. 

515 ... "special": [ 

516 ... -1, // e.g., auto 

517 ... 0, // e.g., disabled 

518 ... true, // e.g., enabled 

519 ... null, // e.g., unspecified 

520 ... ], 

521 ... } 

522 ... ''' 

523 >>> tunable = Tunable.from_json("tunable_with_special", json_config) 

524 >>> # JSON values are converted to Python types 

525 >>> tunable.special 

526 [-1, 0, True, None] 

527 """ 

528 if not self.is_numerical: 

529 assert not self._special 

530 return [] 

531 return self._special 

532 

533 @property 

534 def is_special(self) -> bool: 

535 """ 

536 Check if the current value of the Tunable is special. 

537 

538 Returns 

539 ------- 

540 is_special : bool 

541 True if the current value of the Tunable is special, False otherwise. 
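
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 50,
        ...     "range": [1, 100],
        ...     "special": [-1, 0],
        ... }
        ... '''
        >>> tunable = Tunable.from_json("tunable_with_special", json_config)
        >>> tunable.is_special
        False
        >>> tunable.value = -1
        >>> tunable.is_special
        True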

542 """ 

543 return self.value in self._special 

544 

545 @property 

546 def weights(self) -> list[float] | None: 

547 """ 

548 Get the weights of the categories or special values of the Tunable. Return None 

549 if there are none. 

550 

551 Returns 

552 ------- 

553 weights : [float] 

554 A list of weights or None. 

555 

556 Examples 

557 -------- 

558 >>> json_config = ''' 

559 ... { 

560 ... "type": "categorical", 

561 ... "default": "red", 

562 ... "values": ["red", "blue", "green"], 

563 ... "values_weights": [0.1, 0.2, 0.7], 

564 ... } 

565 ... ''' 

566 >>> categorical_tunable = Tunable.from_json("categorical_tunable", json_config) 

567 >>> categorical_tunable.weights 

568 [0.1, 0.2, 0.7] 

569 >>> dict(zip(categorical_tunable.values, categorical_tunable.weights)) 

570 {'red': 0.1, 'blue': 0.2, 'green': 0.7} 

571 

572 >>> json_config = ''' 

573 ... { 

574 ... "type": "float", 

575 ... "default": 50.0, 

576 ... "range": [1, 100], 

577 ... "special": [-1, 0], 

578 ... "special_weights": [0.1, 0.2], 

579 ... "range_weight": 0.7, 

580 ... } 

581 ... ''' 

582 >>> float_tunable = Tunable.from_json("float_tunable", json_config) 

583 >>> float_tunable.weights 

584 [0.1, 0.2] 

585 >>> dict(zip(float_tunable.special, float_tunable.weights)) 

586 {-1: 0.1, 0: 0.2} 

587 """ 

588 return self._weights 

589 

590 @property 

591 def range_weight(self) -> float | None: 

592 """ 

593 Get the weight of the range of the numeric Tunable. Return None if there are no

594 weights or the Tunable is categorical.

595 

596 Returns 

597 ------- 

598 weight : float 

599 Weight of the range or None. 

600 

601 See Also 

602 -------- 

603 Tunable.weights : For example of range_weight configuration. 

604 """ 

605 assert self.is_numerical 

606 assert self._special 

607 assert self._weights 

608 return self._range_weight 

609 

610 @property 

611 def type(self) -> TunableValueTypeName: 

612 """ 

613 Get the string name of the data type of the Tunable. 

614 

615 Returns 

616 ------- 

617 type : TunableValueTypeName 

618 String representation of the data type of the Tunable. 

619 

620 Examples 

621 -------- 

622 >>> # Example values of the TunableValueTypeName 

623 >>> from mlos_bench.tunables.tunable_types import TunableValueTypeName 

624 >>> TunableValueTypeName 

625 typing.Literal['int', 'float', 'categorical'] 

626 

627 Examples 

628 -------- 

629 >>> json_config = ''' 

630 ... { 

631 ... "type": "categorical", 

632 ... "default": "red", 

633 ... "values": ["red", "blue", "green"], 

634 ... } 

635 ... ''' 

636 >>> categorical_tunable = Tunable.from_json("categorical_tunable", json_config) 

637 >>> categorical_tunable.type 

638 'categorical' 

639 

640 >>> json_config = ''' 

641 ... { 

642 ... "type": "int", 

643 ... "default": 0, 

644 ... "range": [0, 10000], 

645 ... } 

646 ... ''' 

647 >>> int_tunable = Tunable.from_json("int_tunable", json_config) 

648 >>> int_tunable.type 

649 'int' 

650 

651 >>> json_config = ''' 

652 ... { 

653 ... "type": "float", 

654 ... "default": 0.0, 

655 ... "range": [0.0, 10000.0], 

656 ... } 

657 ... ''' 

658 >>> float_tunable = Tunable.from_json("float_tunable", json_config) 

659 >>> float_tunable.type 

660 'float' 

661 """ 

662 return self._type 

663 

664 @property 

665 def dtype(self) -> TunableValueType: 

666 """ 

667 Get the actual Python data type of the Tunable. 

668 

669 This is useful for bulk conversions of the input data. 

670 

671 Returns 

672 ------- 

673 dtype : type 

674 Data type of the Tunable - one of: 

675 ``{int, float, str}`` 

676 

677 Examples 

678 -------- 

679 >>> # Example values of the TunableValueType 

680 >>> from mlos_bench.tunables.tunable_types import TunableValueType 

681 >>> TunableValueType 

682 type[int] | type[float] | type[str] 

683 

684 >>> # Example values of the TUNABLE_DTYPE 

685 >>> from mlos_bench.tunables.tunable_types import TUNABLE_DTYPE 

686 >>> TUNABLE_DTYPE 

687 {'int': <class 'int'>, 'float': <class 'float'>, 'categorical': <class 'str'>} 

688 """ 

689 return TUNABLE_DTYPE[self._type] 

690 

691 @property 

692 def is_categorical(self) -> bool: 

693 """ 

694 Check if the Tunable is categorical. 

695 

696 Returns 

697 ------- 

698 is_categorical : bool 

699 True if the Tunable is categorical, False otherwise. 

700 """ 

701 return self._type == "categorical" 

702 

703 @property 

704 def is_numerical(self) -> bool: 

705 """ 

706 Check if the Tunable is an integer or float. 

707 

708 Returns 

709 ------- 

710 is_numerical : bool

711 True if the Tunable is an integer or float, False otherwise. 

712 """ 

713 return self._type in {"int", "float"} 

714 

715 @property 

716 def range(self) -> tuple[int, int] | tuple[float, float]: 

717 """ 

718 Get the range of the numerical Tunable.

719 

720 Returns 

721 ------- 

722 range : tuple[int, int] | tuple[float, float] 

723 A 2-tuple of numbers that represents the range of the Tunable. 

724 Numbers can be int or float, depending on the type of the Tunable. 

725 

726 Examples 

727 -------- 

728 >>> json_config = ''' 

729 ... { 

730 ... "type": "int", 

731 ... "default": 0, 

732 ... "range": [0, 10000], 

733 ... } 

734 ... ''' 

735 >>> int_tunable = Tunable.from_json("int_tunable", json_config) 

736 >>> int_tunable.range 

737 (0, 10000) 

738 

739 >>> json_config = ''' 

740 ... { 

741 ... "type": "float", 

742 ... "default": 0.0, 

743 ... "range": [0.0, 100.0], 

744 ... } 

745 ... ''' 

746 >>> float_tunable = Tunable.from_json("float_tunable", json_config) 

747 >>> float_tunable.range 

748 (0.0, 100.0) 

749 """ 

750 assert self.is_numerical 

751 assert self._range is not None 

752 return self._range 

753 

754 @property 

755 def span(self) -> int | float: 

756 """ 

757 Gets the span of the range. 

758 

759 Note: this does not take quantization into account. 

760 

761 Returns 

762 ------- 

763 int | float 

764 (max - min) for numerical Tunables. 
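
        Examples
        --------
        An illustrative example (the config values are arbitrary):

        >>> json_config = '''
        ... {
        ...     "type": "int",
        ...     "default": 0,
        ...     "range": [0, 10000],
        ... }
        ... '''
        >>> span_tunable = Tunable.from_json("span_tunable", json_config)
        >>> span_tunable.span
        10000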

765 """ 

766 num_range = self.range 

767 return num_range[1] - num_range[0] 

768 

769 @property 

770 def quantization_bins(self) -> int | None: 

771 """ 

772 Get the number of quantization bins, if specified. 

773 

774 Returns 

775 ------- 

776 quantization_bins : int | None 

777 Number of quantization bins, or None. 

778 

779 Examples 

780 -------- 

781 >>> json_config = ''' 

782 ... { 

783 ... "type": "int", 

784 ... "default": 0, 

785 ... "range": [0, 10000], 

786 ... // Enable quantization. 

787 ... "quantization_bins": 11, 

788 ... } 

789 ... ''' 

790 >>> quantized_tunable = Tunable.from_json("quantized_tunable", json_config) 

791 >>> quantized_tunable.quantization_bins 

792 11 

793 >>> list(quantized_tunable.quantized_values) 

794 [0, 1000, 2000, 3000, 4000, 5000, 6000, 7000, 8000, 9000, 10000] 

795 

796 >>> json_config = ''' 

797 ... { 

798 ... "type": "float", 

799 ... "default": 0, 

800 ... "range": [0, 1], 

801 ... // Enable quantization. 

802 ... "quantization_bins": 5, 

803 ... } 

804 ... ''' 

805 >>> quantized_tunable = Tunable.from_json("quantized_tunable", json_config) 

806 >>> quantized_tunable.quantization_bins 

807 5 

808 >>> list(quantized_tunable.quantized_values) 

809 [0.0, 0.25, 0.5, 0.75, 1.0] 

810 """ 

811 if self.is_categorical: 

812 return None 

813 return self._quantization_bins 

814 

815 @property 

816 def quantized_values(self) -> Iterable[int] | Iterable[float] | None: 

817 """ 

818 Get a sequence of quantized values for this Tunable. 

819 

820 Returns 

821 ------- 

822 Iterable[int] | Iterable[float] | None 

823 If the Tunable is quantizable, returns a sequence of those elements, 

824 else None (e.g., for unquantized float type Tunables). 

825 

826 See Also 

827 -------- 

828 :py:attr:`~.Tunable.quantization_bins` : 

829 For more examples on configuring a Tunable with quantization. 

830 """ 

831 num_range = self.range 

832 if self.type == "float": 

833 if not self.quantization_bins: 

834 return None 

835 # Be sure to return python types instead of numpy types. 

836 return ( 

837 float(x) 

838 for x in np.linspace( 

839 start=num_range[0], 

840 stop=num_range[1], 

841 num=self.quantization_bins, 

842 endpoint=True, 

843 ) 

844 ) 

845 assert self.type == "int", f"Unhandled Tunable type: {self}" 

846 return range( 

847 int(num_range[0]), 

848 int(num_range[1]) + 1, 

849 int(self.span / (self.quantization_bins - 1)) if self.quantization_bins else 1, 

850 ) 

851 

852 @property 

853 def cardinality(self) -> int | None: 

854 """ 

855 Gets the cardinality of elements in this Tunable, or else None (e.g., when the 

856 Tunable is continuous float and not quantized). 

857 

858 If the Tunable has quantization set, this returns the number of quantization bins. 

859 

860 Returns 

861 ------- 

862 cardinality : int | None

863 Either the number of points in the Tunable or else None. 

864 

865 Examples 

866 -------- 

867 >>> json_config = ''' 

868 ... { 

869 ... "type": "categorical", 

870 ... "default": "red", 

871 ... "values": ["red", "blue", "green"], 

872 ... } 

873 ... ''' 

874 >>> categorical_tunable = Tunable.from_json("categorical_tunable", json_config) 

875 >>> categorical_tunable.cardinality 

876 3 

877 

878 >>> json_config = ''' 

879 ... { 

880 ... "type": "int", 

881 ... "default": 0, 

882 ... "range": [0, 10000], 

883 ... } 

884 ... ''' 

885 >>> basic_tunable = Tunable.from_json("basic_tunable", json_config) 

886 >>> basic_tunable.cardinality 

887 10001 

888 

889 >>> json_config = ''' 

890 ... { 

891 ... "type": "int", 

892 ... "default": 0, 

893 ... "range": [0, 10000], 

894 ... // Enable quantization. 

895 ... "quantization_bins": 10, 

896 ... } 

897 ... ''' 

898 >>> quantized_tunable = Tunable.from_json("quantized_tunable", json_config) 

899 >>> quantized_tunable.cardinality 

900 10 

901 

902 >>> json_config = ''' 

903 ... { 

904 ... "type": "float", 

905 ... "default": 50.0, 

906 ... "range": [0, 100], 

907 ... } 

908 ... ''' 

909 >>> float_tunable = Tunable.from_json("float_tunable", json_config) 

910 >>> assert float_tunable.cardinality is None 

911 """ 

912 if self.is_categorical: 

913 return len(self.categories) 

914 if self.quantization_bins: 

915 return self.quantization_bins 

916 if self.type == "int": 

917 return int(self.span) + 1 

918 return None 

919 

920 @property 

921 def is_log(self) -> bool | None: 

922 """ 

923 Check if numeric Tunable is log scale. 

924 

925 Returns 

926 ------- 

927 log : bool | None

928 True if the numeric Tunable uses log-scale sampling, False if linear, None if not specified.

929 

930 Examples 

931 -------- 

932 >>> # Example values of the log scale 

933 >>> json_config = ''' 

934 ... { 

935 ... "type": "int", 

936 ... "default": 0, 

937 ... "range": [0, 10000], 

938 ... // Enable log sampling. 

939 ... "log": true, 

940 ... } 

941 ... ''' 

942 >>> tunable = Tunable.from_json("log_tunable", json_config) 

943 >>> tunable.is_log 

944 True 

945 """ 

946 assert self.is_numerical 

947 return self._log 

948 

949 @property 

950 def distribution(self) -> DistributionName | None: 

951 """ 

952 Get the name of the distribution if specified. 

953 

954 Returns 

955 ------- 

956 distribution : str | None 

957 Name of the distribution or None. 

958 

959 See Also 

960 -------- 

961 :py:attr:`~.Tunable.distribution_params` : 

962 For more examples on configuring a Tunable with a distribution. 

963 

964 Examples 

965 -------- 

966 >>> # Example values of the DistributionName 

967 >>> from mlos_bench.tunables.tunable_types import DistributionName 

968 >>> DistributionName 

969 typing.Literal['uniform', 'normal', 'beta'] 

970 """ 

971 return self._distribution 

972 

973 @property 

974 def distribution_params(self) -> dict[str, float]: 

975 """ 

976 Get the parameters of the distribution, if specified. 

977 

978 Returns 

979 ------- 

980 distribution_params : dict[str, float] 

981 Parameters of the distribution, or an empty dict if none are specified.

982 

983 Examples 

984 -------- 

985 >>> json_config = ''' 

986 ... { 

987 ... "type": "int", 

988 ... "default": 0, 

989 ... "range": [0, 10], 

990 ... // No distribution specified. 

991 ... } 

992 ... ''' 

993 >>> base_config = json.loads(json_config) 

994 >>> basic_tunable = Tunable("basic_tunable", base_config) 

995 >>> assert basic_tunable.distribution is None 

996 >>> basic_tunable.distribution_params 

997 {} 

998 

999 >>> # Example of a uniform distribution (the default if not specified) 

1000 >>> config_with_dist = base_config | { 

1001 ... "distribution": { 

1002 ... "type": "uniform" 

1003 ... } 

1004 ... } 

1005 >>> uniform_tunable = Tunable("uniform_tunable", config_with_dist) 

1006 >>> uniform_tunable.distribution 

1007 'uniform' 

1008 >>> uniform_tunable.distribution_params 

1009 {} 

1010 

1011 >>> # Example of a normal distribution params 

1012 >>> config_with_dist = base_config | { 

1013 ... "distribution": { 

1014 ... "type": "normal", 

1015 ... "params": { 

1016 ... "mu": 0.0, 

1017 ... "sigma": 1.0, 

1018 ... } 

1019 ... } 

1020 ... } 

1021 >>> normal_tunable = Tunable("normal_tunable", config_with_dist) 

1022 >>> normal_tunable.distribution 

1023 'normal' 

1024 >>> normal_tunable.distribution_params 

1025 {'mu': 0.0, 'sigma': 1.0} 

1026 

1027 >>> # Example of a beta distribution params 

1028 >>> config_with_dist = base_config | { 

1029 ... "distribution": { 

1030 ... "type": "beta", 

1031 ... "params": { 

1032 ... "alpha": 1.0, 

1033 ... "beta": 1.0, 

1034 ... } 

1035 ... } 

1036 ... } 

1037 >>> beta_tunable = Tunable("beta_tunable", config_with_dist) 

1038 >>> beta_tunable.distribution 

1039 'beta' 

1040 >>> beta_tunable.distribution_params 

1041 {'alpha': 1.0, 'beta': 1.0} 

1042 """ 

1043 return self._distribution_params 

1044 

1045 @property 

1046 def categories(self) -> list[str | None]: 

1047 """ 

1048 Get the list of all possible values of a categorical Tunable.

1049 The Tunable must be categorical.

1050 

1051 Returns 

1052 ------- 

1053 values : list[str | None]

1054 List of all possible values of a categorical Tunable. 

1055 

1056 See Also 

1057 -------- 

1058 Tunable.values : For more examples on getting the categorical values of a Tunable. 

1059 """ 

1060 assert self.is_categorical 

1061 assert self._values is not None 

1062 return self._values 

1063 

1064 @property 

1065 def values(self) -> Iterable[str | None] | Iterable[int] | Iterable[float] | None: 

1066 """ 

1067 Gets the :py:attr:`~.Tunable.categories` or 

1068 :py:attr:`~.Tunable.quantized_values` for this Tunable. 

1069 

1070 Returns 

1071 ------- 

1072 Iterable[str | None] | Iterable[int] | Iterable[float] | None 

1073 Categories or quantized values. 

1074 

1075 Examples 

1076 -------- 

1077 >>> # Example values of the Tunable categories 

1078 >>> json_config = ''' 

1079 ... { 

1080 ... "type": "categorical", 

1081 ... "values": ["red", "blue", "green"], 

1082 ... "default": "red", 

1083 ... } 

1084 ... ''' 

1085 >>> categorical_tunable = Tunable.from_json("categorical_tunable", json_config) 

1086 >>> list(categorical_tunable.values) 

1087 ['red', 'blue', 'green'] 

1088 >>> assert categorical_tunable.values == categorical_tunable.categories 

1089 

1090 >>> # Example values of the Tunable int 

1091 >>> json_config = ''' 

1092 ... { 

1093 ... "type": "int", 

1094 ... "range": [0, 5], 

1095 ... "default": 1, 

1096 ... } 

1097 ... ''' 

1098 >>> int_tunable = Tunable.from_json("int_tunable", json_config) 

1099 >>> list(int_tunable.values) 

1100 [0, 1, 2, 3, 4, 5] 

1101 

1102 >>> # Example values of the quantized Tunable float 

1103 >>> json_config = ''' 

1104 ... { 

1105 ... "type": "float", 

1106 ... "range": [0, 1], 

1107 ... "default": 0.5, 

1108 ... "quantization_bins": 3, 

1109 ... } 

1110 ... ''' 

1111 >>> float_tunable = Tunable.from_json("float_tunable", json_config) 

1112 >>> list(float_tunable.values) 

1113 [0.0, 0.5, 1.0] 

1114 """ 

1115 if self.is_categorical: 

1116 return self.categories 

1117 assert self.is_numerical 

1118 return self.quantized_values 

1119 

1120 @property 

1121 def meta(self) -> dict[str, Any]: 

1122 """ 

1123 Get the Tunable's metadata. 

1124 

1125 This is a free-form dictionary that can be used to store any additional 

1126 information about the Tunable (e.g., the unit information) which can be 

1127 useful when using the ``dump_params_file`` and ``dump_meta_file`` 

1128 properties of the :py:class:`~mlos_bench.environments` config to 

1129 generate a configuration file for the target system. 

1130 

1131 Examples 

1132 -------- 

1133 >>> json_config = ''' 

1134 ... { 

1135 ... "type": "int", 

1136 ... "range": [0, 10], 

1137 ... "default": 1, 

1138 ... "meta": { 

1139 ... "unit": "seconds", 

1140 ... }, 

1141 ... "description": "Time to wait before timing out a request.", 

1142 ... } 

1143 ... ''' 

1144 >>> tunable = Tunable.from_json("timer_tunable", json_config) 

1145 >>> tunable.meta 

1146 {'unit': 'seconds'} 

1147 """ 

1148 return self._meta